TSTP Solution File: HWV107+1 by Otter---3.3


%------------------------------------------------------------------------------
% File     : Otter---3.3
% Problem  : HWV107+1 : TPTP v8.1.0. Released v6.1.0.
% Transfm  : none
% Format   : tptp:raw
% Command  : otter-tptp-script %s

% Computer : n004.cluster.edu
% Model    : x86_64 x86_64
% CPU      : Intel(R) Xeon(R) CPU E5-2620 v4 @ 2.10GHz
% Memory   : 8042.1875MB
% OS       : Linux 3.10.0-693.el7.x86_64
% CPULimit : 300s
% WCLimit  : 300s
% DateTime : Wed Jul 27 12:58:41 EDT 2022

% Result   : Timeout 297.04s 295.27s
% Output   : None 
% Verified : 
% SZS Type : -

% Comments : 
%------------------------------------------------------------------------------
%----No solution output by system
%------------------------------------------------------------------------------
%----ORIGINAL SYSTEM OUTPUT
% 0.11/0.13  % Problem  : HWV107+1 : TPTP v8.1.0. Released v6.1.0.
% 0.11/0.14  % Command  : otter-tptp-script %s
% 0.13/0.35  % Computer : n004.cluster.edu
% 0.13/0.35  % Model    : x86_64 x86_64
% 0.13/0.35  % CPU      : Intel(R) Xeon(R) CPU E5-2620 v4 @ 2.10GHz
% 0.13/0.35  % Memory   : 8042.1875MB
% 0.13/0.35  % OS       : Linux 3.10.0-693.el7.x86_64
% 0.13/0.35  % CPULimit : 300
% 0.13/0.35  % WCLimit  : 300
% 0.13/0.35  % DateTime : Wed Jul 27 06:43:06 EDT 2022
% 0.13/0.35  % CPUTime  : 
% 297.04/295.27  ----- Otter 3.3f, August 2004 -----
% 297.04/295.27  The process was started by sandbox2 on n004.cluster.edu,
% 297.04/295.27  Wed Jul 27 06:43:06 2022
% 297.04/295.27  The command was "./otter".  The process ID is 16330.
% 297.04/295.27  
% 297.04/295.27  set(prolog_style_variables).
% 297.04/295.27  set(auto).
% 297.04/295.27     dependent: set(auto1).
% 297.04/295.27     dependent: set(process_input).
% 297.04/295.27     dependent: clear(print_kept).
% 297.04/295.27     dependent: clear(print_new_demod).
% 297.04/295.27     dependent: clear(print_back_demod).
% 297.04/295.27     dependent: clear(print_back_sub).
% 297.04/295.27     dependent: set(control_memory).
% 297.04/295.27     dependent: assign(max_mem, 12000).
% 297.04/295.27     dependent: assign(pick_given_ratio, 4).
% 297.04/295.27     dependent: assign(stats_level, 1).
% 297.04/295.27     dependent: assign(max_seconds, 10800).
% 297.04/295.27  clear(print_given).
% 297.04/295.27  
% 297.04/295.27  formula_list(usable).
% 297.04/295.27  all A (A=A).
% 297.04/295.27  nextState(constB8,constB9).
% 297.04/295.27  nextState(constB7,constB8).
% 297.04/295.27  nextState(constB6,constB7).
% 297.04/295.27  nextState(constB5,constB6).
% 297.04/295.27  nextState(constB4,constB5).
% 297.04/295.27  nextState(constB3,constB4).
% 297.04/295.27  nextState(constB2,constB3).
% 297.04/295.27  nextState(constB1,constB2).
% 297.04/295.27  nextState(constB0,constB1).
% 297.04/295.27  all VarNext VarCurr (nextState(VarCurr,VarNext)->reachableState(VarCurr)&reachableState(VarNext)).
% 297.04/295.27  all VarState (reachableState(VarState)->constB0=VarState|constB1=VarState|constB2=VarState|constB3=VarState|constB4=VarState|constB5=VarState|constB6=VarState|constB7=VarState|constB8=VarState|constB9=VarState|constB10=VarState|constB11=VarState|constB12=VarState|constB13=VarState|constB14=VarState|constB15=VarState|constB16=VarState|constB17=VarState|constB18=VarState|constB19=VarState|constB20=VarState).
% 297.04/295.27  reachableState(constB20).
% 297.04/295.27  reachableState(constB19).
% 297.04/295.27  reachableState(constB18).
% 297.04/295.27  reachableState(constB17).
% 297.04/295.27  reachableState(constB16).
% 297.04/295.27  reachableState(constB15).
% 297.04/295.27  reachableState(constB14).
% 297.04/295.27  reachableState(constB13).
% 297.04/295.27  reachableState(constB12).
% 297.04/295.27  reachableState(constB11).
% 297.04/295.27  reachableState(constB10).
% 297.04/295.27  reachableState(constB9).
% 297.04/295.27  reachableState(constB8).
% 297.04/295.27  reachableState(constB7).
% 297.04/295.27  reachableState(constB6).
% 297.04/295.27  reachableState(constB5).
% 297.04/295.27  reachableState(constB4).
% 297.04/295.27  reachableState(constB3).
% 297.04/295.27  reachableState(constB2).
% 297.04/295.27  reachableState(constB1).
% 297.04/295.27  reachableState(constB0).
% 297.04/295.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1(VarCurr)<-> -v1(VarNext))).
% 297.04/295.27  -v1(constB0).
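%
% The axioms above fix the bounded state skeleton: nextState chains constB0
% through constB9, every state constB0..constB20 is asserted reachable, and the
% clock v1 toggles across each transition, starting from -v1(constB0). A minimal
% Python sketch of that skeleton (helper names are illustrative, not from the
% problem):
%
%   states = [f"constB{i}" for i in range(21)]                 # constB0..constB20, all reachable
%   chain = {f"constB{i}": f"constB{i+1}" for i in range(9)}   # the nine nextState facts, B0..B9
%   v1 = {"constB0": False}                                    # -v1(constB0)
%   for cur, nxt in chain.items():
%       v1[nxt] = not v1[cur]                # nextState(S,T) -> (v1(S) <-> -v1(T))
%   assert set(v1) <= set(states)
%   assert v1["constB1"] and not v1["constB2"]                 # the clock alternates
%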
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_20,B)<->v5939(constB20,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_20).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB20,v5939_range_8_to_0_address_term_bound_20).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_19,B)<->v5939(constB19,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_19).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB19,v5939_range_8_to_0_address_term_bound_19).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_18,B)<->v5939(constB18,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_18).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB18,v5939_range_8_to_0_address_term_bound_18).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_17,B)<->v5939(constB17,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_17).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB17,v5939_range_8_to_0_address_term_bound_17).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_16,B)<->v5939(constB16,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_16).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB16,v5939_range_8_to_0_address_term_bound_16).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_15,B)<->v5939(constB15,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_15).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB15,v5939_range_8_to_0_address_term_bound_15).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_14,B)<->v5939(constB14,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_14).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB14,v5939_range_8_to_0_address_term_bound_14).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_13,B)<->v5939(constB13,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_13).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB13,v5939_range_8_to_0_address_term_bound_13).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_12,B)<->v5939(constB12,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_12).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB12,v5939_range_8_to_0_address_term_bound_12).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_11,B)<->v5939(constB11,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_11).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB11,v5939_range_8_to_0_address_term_bound_11).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_10,B)<->v5939(constB10,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_10).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB10,v5939_range_8_to_0_address_term_bound_10).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_9,B)<->v5939(constB9,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_9).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB9,v5939_range_8_to_0_address_term_bound_9).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_8,B)<->v5939(constB8,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_8).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB8,v5939_range_8_to_0_address_term_bound_8).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_7,B)<->v5939(constB7,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_7).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB7,v5939_range_8_to_0_address_term_bound_7).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_6,B)<->v5939(constB6,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_6).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB6,v5939_range_8_to_0_address_term_bound_6).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_5,B)<->v5939(constB5,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_5).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB5,v5939_range_8_to_0_address_term_bound_5).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_4,B)<->v5939(constB4,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_4).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB4,v5939_range_8_to_0_address_term_bound_4).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_3,B)<->v5939(constB3,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_3).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB3,v5939_range_8_to_0_address_term_bound_3).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_2,B)<->v5939(constB2,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_2).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB2,v5939_range_8_to_0_address_term_bound_2).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_1,B)<->v5939(constB1,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_1).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB1,v5939_range_8_to_0_address_term_bound_1).
% 297.04/295.27  all B (addressVal(v5939_range_8_to_0_address_term_bound_0,B)<->v5939(constB0,B)).
% 297.04/295.27  address(v5939_range_8_to_0_address_term_bound_0).
% 297.04/295.27  v5939_range_8_to_0_address_association(constB0,v5939_range_8_to_0_address_term_bound_0).
% 297.04/295.27  all B (addressVal(v757_range_8_to_0_address_term_bound_20,B)<->v757(constB20,B)).
% 297.04/295.27  address(v757_range_8_to_0_address_term_bound_20).
% 297.04/295.27  v757_range_8_to_0_address_association(constB20,v757_range_8_to_0_address_term_bound_20).
% 297.04/295.27  all B (addressVal(v757_range_8_to_0_address_term_bound_19,B)<->v757(constB19,B)).
% 297.04/295.27  address(v757_range_8_to_0_address_term_bound_19).
% 297.04/295.27  v757_range_8_to_0_address_association(constB19,v757_range_8_to_0_address_term_bound_19).
% 297.04/295.27  all B (addressVal(v757_range_8_to_0_address_term_bound_18,B)<->v757(constB18,B)).
% 297.04/295.27  address(v757_range_8_to_0_address_term_bound_18).
% 297.04/295.27  v757_range_8_to_0_address_association(constB18,v757_range_8_to_0_address_term_bound_18).
% 297.04/295.27  all B (addressVal(v757_range_8_to_0_address_term_bound_17,B)<->v757(constB17,B)).
% 297.04/295.27  address(v757_range_8_to_0_address_term_bound_17).
% 297.04/295.27  v757_range_8_to_0_address_association(constB17,v757_range_8_to_0_address_term_bound_17).
% 297.04/295.27  all B (addressVal(v757_range_8_to_0_address_term_bound_16,B)<->v757(constB16,B)).
% 297.04/295.27  address(v757_range_8_to_0_address_term_bound_16).
% 297.04/295.27  v757_range_8_to_0_address_association(constB16,v757_range_8_to_0_address_term_bound_16).
% 297.04/295.27  all B (addressVal(v757_range_8_to_0_address_term_bound_15,B)<->v757(constB15,B)).
% 297.04/295.27  address(v757_range_8_to_0_address_term_bound_15).
% 297.04/295.27  v757_range_8_to_0_address_association(constB15,v757_range_8_to_0_address_term_bound_15).
% 297.04/295.27  all B (addressVal(v757_range_8_to_0_address_term_bound_14,B)<->v757(constB14,B)).
% 297.04/295.27  address(v757_range_8_to_0_address_term_bound_14).
% 297.04/295.28  v757_range_8_to_0_address_association(constB14,v757_range_8_to_0_address_term_bound_14).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_13,B)<->v757(constB13,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_13).
% 297.04/295.28  v757_range_8_to_0_address_association(constB13,v757_range_8_to_0_address_term_bound_13).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_12,B)<->v757(constB12,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_12).
% 297.04/295.28  v757_range_8_to_0_address_association(constB12,v757_range_8_to_0_address_term_bound_12).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_11,B)<->v757(constB11,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_11).
% 297.04/295.28  v757_range_8_to_0_address_association(constB11,v757_range_8_to_0_address_term_bound_11).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_10,B)<->v757(constB10,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_10).
% 297.04/295.28  v757_range_8_to_0_address_association(constB10,v757_range_8_to_0_address_term_bound_10).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_9,B)<->v757(constB9,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_9).
% 297.04/295.28  v757_range_8_to_0_address_association(constB9,v757_range_8_to_0_address_term_bound_9).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_8,B)<->v757(constB8,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_8).
% 297.04/295.28  v757_range_8_to_0_address_association(constB8,v757_range_8_to_0_address_term_bound_8).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_7,B)<->v757(constB7,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_7).
% 297.04/295.28  v757_range_8_to_0_address_association(constB7,v757_range_8_to_0_address_term_bound_7).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_6,B)<->v757(constB6,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_6).
% 297.04/295.28  v757_range_8_to_0_address_association(constB6,v757_range_8_to_0_address_term_bound_6).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_5,B)<->v757(constB5,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_5).
% 297.04/295.28  v757_range_8_to_0_address_association(constB5,v757_range_8_to_0_address_term_bound_5).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_4,B)<->v757(constB4,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_4).
% 297.04/295.28  v757_range_8_to_0_address_association(constB4,v757_range_8_to_0_address_term_bound_4).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_3,B)<->v757(constB3,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_3).
% 297.04/295.28  v757_range_8_to_0_address_association(constB3,v757_range_8_to_0_address_term_bound_3).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_2,B)<->v757(constB2,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_2).
% 297.04/295.28  v757_range_8_to_0_address_association(constB2,v757_range_8_to_0_address_term_bound_2).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_1,B)<->v757(constB1,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_1).
% 297.04/295.28  v757_range_8_to_0_address_association(constB1,v757_range_8_to_0_address_term_bound_1).
% 297.04/295.28  all B (addressVal(v757_range_8_to_0_address_term_bound_0,B)<->v757(constB0,B)).
% 297.04/295.28  address(v757_range_8_to_0_address_term_bound_0).
% 297.04/295.28  v757_range_8_to_0_address_association(constB0,v757_range_8_to_0_address_term_bound_0).
% 297.04/295.28  all B A2 A1 (address(A1)&address(A2)&addressDiff(A1,A2,B)->A1=A2| (addressVal(A1,B)<-> -addressVal(A2,B))).
% 297.04/295.28  all A1 A2 (addressDiff(A1,A2,bitIndex0)|addressDiff(A1,A2,bitIndex1)|addressDiff(A1,A2,bitIndex2)|addressDiff(A1,A2,bitIndex3)|addressDiff(A1,A2,bitIndex4)|addressDiff(A1,A2,bitIndex5)|addressDiff(A1,A2,bitIndex6)|addressDiff(A1,A2,bitIndex7)|addressDiff(A1,A2,bitIndex8)).
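%
% The two axioms above make address terms extensional: addressDiff supplies a
% candidate bit for every pair of addresses, and at that bit two distinct
% addresses must disagree, so address terms agreeing on bitIndex0..bitIndex8
% are equal. A quick Python check of the intended consequence, that the nine
% bits distinguish exactly 2^9 address values (a sketch, not prover output):
%
%   from itertools import product
%   valuations = set(product([False, True], repeat=9))   # one per assignment to bitIndex0..8
%   assert len(valuations) == 2 ** 9                     # 512 pairwise-distinguishable addresses
%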
% 297.04/295.28  -(all VarCurr (reachableState(VarCurr)->v4(VarCurr))).
% 297.04/295.28  all VarCurr (-v4(VarCurr)<->v8169(VarCurr)).
% 297.04/295.28  all VarCurr (-v8169(VarCurr)<->v8170(VarCurr)).
% 297.04/295.28  all VarCurr (v8170(VarCurr)<->v8172(VarCurr)&v8183(VarCurr)).
% 297.04/295.28  all VarCurr (v8183(VarCurr)<->v8174(VarCurr,bitIndex0)|v8174(VarCurr,bitIndex1)).
% 297.04/295.28  all VarCurr (-v8172(VarCurr)<->v8173(VarCurr)).
% 297.04/295.28  all VarCurr (v8173(VarCurr)<->v8174(VarCurr,bitIndex0)&v8174(VarCurr,bitIndex1)).
% 297.04/295.28  all VarCurr (v8174(VarCurr,bitIndex0)<->v8175(VarCurr)).
% 297.04/295.28  all VarCurr (v8174(VarCurr,bitIndex1)<->$F).
% 297.04/295.28  all VarCurr (v8175(VarCurr)<->v8176(VarCurr)|v8182(VarCurr)).
% 297.04/295.29  all VarCurr (v8182(VarCurr)<-> (v6250(VarCurr,bitIndex59)<->v8157(VarCurr,bitIndex59))& (v6250(VarCurr,bitIndex58)<->v8157(VarCurr,bitIndex58))& (v6250(VarCurr,bitIndex57)<->v8157(VarCurr,bitIndex57))& (v6250(VarCurr,bitIndex56)<->v8157(VarCurr,bitIndex56))& (v6250(VarCurr,bitIndex55)<->v8157(VarCurr,bitIndex55))& (v6250(VarCurr,bitIndex54)<->v8157(VarCurr,bitIndex54))& (v6250(VarCurr,bitIndex53)<->v8157(VarCurr,bitIndex53))& (v6250(VarCurr,bitIndex52)<->v8157(VarCurr,bitIndex52))& (v6250(VarCurr,bitIndex51)<->v8157(VarCurr,bitIndex51))& (v6250(VarCurr,bitIndex50)<->v8157(VarCurr,bitIndex50))& (v6250(VarCurr,bitIndex49)<->v8157(VarCurr,bitIndex49))& (v6250(VarCurr,bitIndex48)<->v8157(VarCurr,bitIndex48))& (v6250(VarCurr,bitIndex47)<->v8157(VarCurr,bitIndex47))& (v6250(VarCurr,bitIndex46)<->v8157(VarCurr,bitIndex46))& (v6250(VarCurr,bitIndex45)<->v8157(VarCurr,bitIndex45))& (v6250(VarCurr,bitIndex44)<->v8157(VarCurr,bitIndex44))& (v6250(VarCurr,bitIndex43)<->v8157(VarCurr,bitIndex43))& (v6250(VarCurr,bitIndex42)<->v8157(VarCurr,bitIndex42))& (v6250(VarCurr,bitIndex41)<->v8157(VarCurr,bitIndex41))& (v6250(VarCurr,bitIndex40)<->v8157(VarCurr,bitIndex40))& (v6250(VarCurr,bitIndex39)<->v8157(VarCurr,bitIndex39))& (v6250(VarCurr,bitIndex38)<->v8157(VarCurr,bitIndex38))& (v6250(VarCurr,bitIndex37)<->v8157(VarCurr,bitIndex37))& (v6250(VarCurr,bitIndex36)<->v8157(VarCurr,bitIndex36))& (v6250(VarCurr,bitIndex35)<->v8157(VarCurr,bitIndex35))& (v6250(VarCurr,bitIndex34)<->v8157(VarCurr,bitIndex34))& (v6250(VarCurr,bitIndex33)<->v8157(VarCurr,bitIndex33))& (v6250(VarCurr,bitIndex32)<->v8157(VarCurr,bitIndex32))& (v6250(VarCurr,bitIndex31)<->v8157(VarCurr,bitIndex31))& (v6250(VarCurr,bitIndex30)<->v8157(VarCurr,bitIndex30))& (v6250(VarCurr,bitIndex29)<->v8157(VarCurr,bitIndex29))& (v6250(VarCurr,bitIndex28)<->v8157(VarCurr,bitIndex28))& (v6250(VarCurr,bitIndex27)<->v8157(VarCurr,bitIndex27))& (v6250(VarCurr,bitIndex26)<->v8157(VarCurr,bitIndex26))& (v6250(VarCurr,bitIndex25)<->v8157(VarCurr,bitIndex25))& (v6250(VarCurr,bitIndex24)<->v8157(VarCurr,bitIndex24))& (v6250(VarCurr,bitIndex23)<->v8157(VarCurr,bitIndex23))& (v6250(VarCurr,bitIndex22)<->v8157(VarCurr,bitIndex22))& (v6250(VarCurr,bitIndex21)<->v8157(VarCurr,bitIndex21))& (v6250(VarCurr,bitIndex20)<->v8157(VarCurr,bitIndex20))& (v6250(VarCurr,bitIndex19)<->v8157(VarCurr,bitIndex19))& (v6250(VarCurr,bitIndex18)<->v8157(VarCurr,bitIndex18))& (v6250(VarCurr,bitIndex17)<->v8157(VarCurr,bitIndex17))& (v6250(VarCurr,bitIndex16)<->v8157(VarCurr,bitIndex16))& (v6250(VarCurr,bitIndex15)<->v8157(VarCurr,bitIndex15))& (v6250(VarCurr,bitIndex14)<->v8157(VarCurr,bitIndex14))& (v6250(VarCurr,bitIndex13)<->v8157(VarCurr,bitIndex13))& (v6250(VarCurr,bitIndex12)<->v8157(VarCurr,bitIndex12))& (v6250(VarCurr,bitIndex11)<->v8157(VarCurr,bitIndex11))& (v6250(VarCurr,bitIndex10)<->v8157(VarCurr,bitIndex10))& (v6250(VarCurr,bitIndex9)<->v8157(VarCurr,bitIndex9))& (v6250(VarCurr,bitIndex8)<->v8157(VarCurr,bitIndex8))& (v6250(VarCurr,bitIndex7)<->v8157(VarCurr,bitIndex7))& (v6250(VarCurr,bitIndex6)<->v8157(VarCurr,bitIndex6))& (v6250(VarCurr,bitIndex5)<->v8157(VarCurr,bitIndex5))& (v6250(VarCurr,bitIndex4)<->v8157(VarCurr,bitIndex4))& (v6250(VarCurr,bitIndex3)<->v8157(VarCurr,bitIndex3))& (v6250(VarCurr,bitIndex2)<->v8157(VarCurr,bitIndex2))& (v6250(VarCurr,bitIndex1)<->v8157(VarCurr,bitIndex1))& (v6250(VarCurr,bitIndex0)<->v8157(VarCurr,bitIndex0))).
% 297.04/295.29  all VarCurr (-v8176(VarCurr)<->v8177(VarCurr)).
% 297.04/295.29  all VarCurr (v8177(VarCurr)<->v8178(VarCurr)&v8179(VarCurr)).
% 297.04/295.29  all VarCurr (-v8179(VarCurr)<->v8180(VarCurr)).
% 297.04/295.29  all VarCurr (v8180(VarCurr)<->v618(VarCurr)&v8181(VarCurr)).
% 297.04/295.29  all VarCurr (v8181(VarCurr)<-> (v757(VarCurr,bitIndex8)<->v5939(VarCurr,bitIndex8))& (v757(VarCurr,bitIndex7)<->v5939(VarCurr,bitIndex7))& (v757(VarCurr,bitIndex6)<->v5939(VarCurr,bitIndex6))& (v757(VarCurr,bitIndex5)<->v5939(VarCurr,bitIndex5))& (v757(VarCurr,bitIndex4)<->v5939(VarCurr,bitIndex4))& (v757(VarCurr,bitIndex3)<->v5939(VarCurr,bitIndex3))& (v757(VarCurr,bitIndex2)<->v5939(VarCurr,bitIndex2))& (v757(VarCurr,bitIndex1)<->v5939(VarCurr,bitIndex1))& (v757(VarCurr,bitIndex0)<->v5939(VarCurr,bitIndex0))).
% 297.09/295.29  all VarCurr (v8178(VarCurr)<->v7(VarCurr)&v73(VarCurr)).
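%
% Chaining the property definitions above: -v4 <-> v8169 and -v8169 <-> v8170
% give v4 <-> v8170, and since v8174(_,bitIndex1) is fixed to $F, the
% conjunction v8172 & v8183 is a two-bit exclusive-or that collapses to
% v8174(_,bitIndex0) alone, i.e. v4 <-> v8175. A sketch of that collapse
% (function names are illustrative):
%
%   def v4(v8175):
%       b0, b1 = v8175, False       # v8174 at bitIndex0 and bitIndex1; bit 1 is $F
%       v8172 = not (b0 and b1)
%       v8183 = b0 or b1
%       return v8172 and v8183      # xor(b0, False) == b0 == v8175
%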
% 297.09/295.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7(VarNext)-> (all B (range_59_0(B)-> (v6250(VarNext,B)<->v6250(VarCurr,B)))))).
% 297.09/295.29  all VarNext (v7(VarNext)-> (all B (range_59_0(B)-> (v6250(VarNext,B)<->v8161(VarNext,B))))).
% 297.09/295.29  all VarCurr (-v73(VarCurr)-> (all B (range_59_0(B)-> (v8161(VarCurr,B)<->$F)))).
% 297.09/295.29  all VarCurr (v73(VarCurr)-> (all B (range_59_0(B)-> (v8161(VarCurr,B)<->v8162(VarCurr,B))))).
% 297.09/295.29  all VarCurr (-v8163(VarCurr)-> (all B (range_59_0(B)-> (v8162(VarCurr,B)<->v8157(VarCurr,B))))).
% 297.09/295.29  all VarCurr (v8163(VarCurr)-> (all B (range_59_0(B)-> (v8162(VarCurr,B)<->b000000000000000000000000000000000000000000000000000000000000(B))))).
% 297.09/295.29  all VarCurr (v8163(VarCurr)<->v618(VarCurr)&v8164(VarCurr)).
% 297.09/295.29  all VarCurr (v8164(VarCurr)<-> (v757(VarCurr,bitIndex8)<->v5939(VarCurr,bitIndex8))& (v757(VarCurr,bitIndex7)<->v5939(VarCurr,bitIndex7))& (v757(VarCurr,bitIndex6)<->v5939(VarCurr,bitIndex6))& (v757(VarCurr,bitIndex5)<->v5939(VarCurr,bitIndex5))& (v757(VarCurr,bitIndex4)<->v5939(VarCurr,bitIndex4))& (v757(VarCurr,bitIndex3)<->v5939(VarCurr,bitIndex3))& (v757(VarCurr,bitIndex2)<->v5939(VarCurr,bitIndex2))& (v757(VarCurr,bitIndex1)<->v5939(VarCurr,bitIndex1))& (v757(VarCurr,bitIndex0)<->v5939(VarCurr,bitIndex0))).
% 297.09/295.29  all B (range_59_0(B)-> (v6250(constB0,B)<->$F)).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex59).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex58).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex57).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex56).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex55).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex54).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex53).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex52).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex51).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex50).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex49).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex48).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex47).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex46).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex45).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex44).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex43).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex42).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex41).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex40).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex39).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex38).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex37).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex36).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex35).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex34).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex33).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex32).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex31).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex30).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex29).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex28).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex27).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex26).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex25).
% 297.09/295.29  -b000000000000000000000000000000000000000000000000000000000000(bitIndex24).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex23).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex22).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex21).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex20).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex19).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex18).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex17).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex16).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex15).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex14).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex13).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex12).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex11).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex10).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex9).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex8).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex7).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex6).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex5).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex4).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex3).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex2).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex1).
% 297.09/295.30  -b000000000000000000000000000000000000000000000000000000000000(bitIndex0).
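%
% Taken together, the v6250 axioms above (with the all-zero 60-bit constant
% just spelled out) describe an enabled register: v6250 holds its value while
% v7 is low, and otherwise loads v8161, which is all-zero unless v73 holds, in
% which case v8162 muxes between v8157 and the zero constant under v8163. A
% sketch assuming plain Boolean lists for the bit vectors:
%
%   ZERO60 = [False] * 60                    # b000...0 above: every bitIndex is false
%   def next_v6250(v6250_cur, v7, v73, v8163, v8157):
%       if not v7:
%           return v6250_cur                 # hold while the enable is low
%       if not v73:
%           return ZERO60                    # v8161 <-> $F on range_59_0
%       return ZERO60 if v8163 else v8157    # the v8162 mux
%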
% 297.09/295.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all AssociatedAddressVar (v757_range_8_to_0_address_association(VarNext,AssociatedAddressVar)-> (all A (address(A)-> (all B (A=AssociatedAddressVar-> (range_59_0(B)-> (v8157(VarNext,B)<->v6253_array(VarNext,A,B)))))))))).
% 297.09/295.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all AssociatedAddressVar (v5939_range_8_to_0_address_association(VarNext,AssociatedAddressVar)-> (all A (-(A=AssociatedAddressVar&v8153(VarNext))-> (all B (range_59_0(B)-> (v6253_array(VarNext,A,B)<->v6253_array(VarCurr,A,B))))))))).
% 297.09/295.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all AssociatedAddressVar (v5939_range_8_to_0_address_association(VarNext,AssociatedAddressVar)-> (all A (A=AssociatedAddressVar&v8153(VarNext)-> (all B (range_59_0(B)-> (v6253_array(VarNext,A,B)<->v6255(VarNext,B))))))))).
% 297.09/295.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8153(VarNext)<->v8154(VarNext)&v618(VarNext))).
% 297.09/295.30  all VarCurr (-v8154(VarCurr)<->v7(VarCurr)).
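%
% The array axioms above give v6253_array one read port and one write port:
% v8157 reads the word at the address associated with v757, while the word at
% the address associated with v5939 is overwritten with v6255 when the enable
% v8153 = (not v7) & v618 holds; every other word carries over unchanged. A
% dictionary model of this, assuming addresses as keys and 60-bit words as
% values:
%
%   def read_word(mem, read_addr):
%       return mem[read_addr]                # v8157: the word at the v757-associated address
%   def step_memory(mem, write_addr, v7, v618, v6255_word):
%       enable = (not v7) and v618           # v8153, with v8154 <-> -v7
%       new_mem = dict(mem)                  # unwritten words carry over
%       if enable:
%           new_mem[write_addr] = list(v6255_word)
%       return new_mem
%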
% 297.09/295.30  all VarCurr B (range_59_0(B)-> (v6255(VarCurr,B)<->v6257(VarCurr,B))).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex59)<->v6259(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex58)<->v6287(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex57)<->v6315(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex56)<->v6343(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex55)<->v6371(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex54)<->v6399(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex53)<->v6427(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex52)<->v6455(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex51)<->v6483(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex50)<->v6571(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex49)<->v6657(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex48)<->v6742(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex47)<->v6835(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex46)<->v6863(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex45)<->v6891(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex44)<->v6919(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex43)<->v6947(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex42)<->v6975(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex41)<->v7003(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex40)<->v7031(VarCurr)).
% 297.09/295.30  all VarCurr (v6257(VarCurr,bitIndex39)<->v7059(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex38)<->v7087(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex37)<->v7115(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex36)<->v7143(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex35)<->v7171(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex34)<->v7199(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex33)<->v7227(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex32)<->v7255(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex31)<->v7283(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex30)<->v174(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex29)<->v7311(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex28)<->v7339(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex27)<->v7367(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex26)<->v7395(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex25)<->v7423(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex24)<->v7451(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex23)<->v7479(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex22)<->v7507(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex21)<->v7535(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex20)<->v7563(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex19)<->v7591(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex18)<->v7619(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex17)<->v7647(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex16)<->v7675(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex15)<->v7703(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex14)<->v7731(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex13)<->v7759(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex12)<->v7787(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex11)<->v7815(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex10)<->v7843(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex9)<->v7871(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex8)<->v7899(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex7)<->v7927(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex6)<->v7955(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex5)<->v7983(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex4)<->v8011(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex3)<->v8039(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex2)<->v8067(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex1)<->v8095(VarCurr)).
% 297.09/295.31  all VarCurr (v6257(VarCurr,bitIndex0)<->v8123(VarCurr)).
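%
% The sixty axioms above are pure wiring: each bit of v6257 is driven by one
% scalar signal (v8123 for bitIndex0, v8095 for bitIndex1, ..., v6259 for
% bitIndex59), and the blocks that follow define these signals bit by bit,
% starting from v8123. An excerpt of that fan-in as a lookup table (sketch
% only):
%
%   v6257_drivers = {0: "v8123", 1: "v8095", 2: "v8067", 3: "v8039", 59: "v6259"}  # excerpt
%   def v6257_bit(state, bit, signals):
%       return signals[v6257_drivers[bit]](state)   # pure fan-in, no logic of its own
%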
% 297.09/295.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8134(VarNext)-> (v8123(VarNext)<->v8123(VarCurr)))).
% 297.09/295.31  all VarNext (v8134(VarNext)-> (v8123(VarNext)<->v8144(VarNext))).
% 297.09/295.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8144(VarNext)<->v8142(VarCurr))).
% 297.09/295.31  all VarCurr (-v8145(VarCurr)-> (v8142(VarCurr)<->x552(VarCurr))).
% 297.09/295.31  all VarCurr (v8145(VarCurr)-> (v8142(VarCurr)<->v8129(VarCurr))).
% 297.09/295.31  all VarCurr (v8145(VarCurr)<->v8146(VarCurr)&v8147(VarCurr)).
% 297.09/295.31  all VarCurr (-v8147(VarCurr)<->v8127(VarCurr)).
% 297.09/295.31  all VarCurr (-v8146(VarCurr)<->v8125(VarCurr)).
% 297.09/295.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8134(VarNext)<->v8135(VarNext))).
% 297.09/295.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8135(VarNext)<->v8136(VarNext)&v8131(VarNext))).
% 297.09/295.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8136(VarNext)<->v8138(VarNext))).
% 297.09/295.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8138(VarNext)<->v8131(VarCurr))).
% 297.09/295.31  v8123(constB0)<->$F.
% 297.09/295.31  all VarCurr (v8131(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.31  all VarCurr (v8129(VarCurr)<->v202(VarCurr,bitIndex0)).
% 297.09/295.31  all VarCurr (v202(VarCurr,bitIndex0)<->v204(VarCurr,bitIndex0)).
% 297.09/295.31  all VarCurr (v204(VarCurr,bitIndex0)<->v546(VarCurr,bitIndex0)).
% 297.09/295.31  all VarCurr (v8127(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.31  all VarCurr (v8125(VarCurr)<->v85(VarCurr,bitIndex1)).
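%
% The v8123 block above is a rising-edge flip-flop: v8134 fires when the clock
% v8131 goes from low to high across a transition, and on that edge v8123
% loads v8142 as sampled in the current state, where v8142 is the data bit
% v8129 unless one of v8125 or v8127 is set, in which case it is left
% unconstrained (x552). The same shape repeats below for v8095, v8067, and the
% remaining bits of v6257. A one-bit Python model (illustrative names):
%
%   def dff_rising(q, clk_cur, clk_next, d, ov1, ov2, x552):
%       edge = (not clk_cur) and clk_next    # v8134: rising edge of v8131
%       if not edge:
%           return q                         # hold v8123
%       sel = (not ov1) and (not ov2)        # v8145: neither override active
%       return d if sel else x552            # the v8142 mux
%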
% 297.09/295.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8106(VarNext)-> (v8095(VarNext)<->v8095(VarCurr)))).
% 297.09/295.31  all VarNext (v8106(VarNext)-> (v8095(VarNext)<->v8116(VarNext))).
% 297.09/295.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8116(VarNext)<->v8114(VarCurr))).
% 297.09/295.31  all VarCurr (-v8117(VarCurr)-> (v8114(VarCurr)<->x552(VarCurr))).
% 297.09/295.31  all VarCurr (v8117(VarCurr)-> (v8114(VarCurr)<->v8101(VarCurr))).
% 297.09/295.31  all VarCurr (v8117(VarCurr)<->v8118(VarCurr)&v8119(VarCurr)).
% 297.09/295.31  all VarCurr (-v8119(VarCurr)<->v8099(VarCurr)).
% 297.09/295.31  all VarCurr (-v8118(VarCurr)<->v8097(VarCurr)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8106(VarNext)<->v8107(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8107(VarNext)<->v8108(VarNext)&v8103(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8108(VarNext)<->v8110(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8110(VarNext)<->v8103(VarCurr))).
% 297.09/295.32  v8095(constB0)<->$F.
% 297.09/295.32  all VarCurr (v8103(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8101(VarCurr)<->v202(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v202(VarCurr,bitIndex1)<->v204(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v204(VarCurr,bitIndex1)<->v546(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8099(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8097(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8078(VarNext)-> (v8067(VarNext)<->v8067(VarCurr)))).
% 297.09/295.32  all VarNext (v8078(VarNext)-> (v8067(VarNext)<->v8088(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8088(VarNext)<->v8086(VarCurr))).
% 297.09/295.32  all VarCurr (-v8089(VarCurr)-> (v8086(VarCurr)<->x552(VarCurr))).
% 297.09/295.32  all VarCurr (v8089(VarCurr)-> (v8086(VarCurr)<->v8073(VarCurr))).
% 297.09/295.32  all VarCurr (v8089(VarCurr)<->v8090(VarCurr)&v8091(VarCurr)).
% 297.09/295.32  all VarCurr (-v8091(VarCurr)<->v8071(VarCurr)).
% 297.09/295.32  all VarCurr (-v8090(VarCurr)<->v8069(VarCurr)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8078(VarNext)<->v8079(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8079(VarNext)<->v8080(VarNext)&v8075(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8080(VarNext)<->v8082(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8082(VarNext)<->v8075(VarCurr))).
% 297.09/295.32  v8067(constB0)<->$F.
% 297.09/295.32  all VarCurr (v8075(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8073(VarCurr)<->v202(VarCurr,bitIndex2)).
% 297.09/295.32  all VarCurr (v202(VarCurr,bitIndex2)<->v204(VarCurr,bitIndex2)).
% 297.09/295.32  all VarCurr (v204(VarCurr,bitIndex2)<->v546(VarCurr,bitIndex2)).
% 297.09/295.32  all VarCurr (v8071(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8069(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8050(VarNext)-> (v8039(VarNext)<->v8039(VarCurr)))).
% 297.09/295.32  all VarNext (v8050(VarNext)-> (v8039(VarNext)<->v8060(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8060(VarNext)<->v8058(VarCurr))).
% 297.09/295.32  all VarCurr (-v8061(VarCurr)-> (v8058(VarCurr)<->x552(VarCurr))).
% 297.09/295.32  all VarCurr (v8061(VarCurr)-> (v8058(VarCurr)<->v8045(VarCurr))).
% 297.09/295.32  all VarCurr (v8061(VarCurr)<->v8062(VarCurr)&v8063(VarCurr)).
% 297.09/295.32  all VarCurr (-v8063(VarCurr)<->v8043(VarCurr)).
% 297.09/295.32  all VarCurr (-v8062(VarCurr)<->v8041(VarCurr)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8050(VarNext)<->v8051(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8051(VarNext)<->v8052(VarNext)&v8047(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8052(VarNext)<->v8054(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8054(VarNext)<->v8047(VarCurr))).
% 297.09/295.32  v8039(constB0)<->$F.
% 297.09/295.32  all VarCurr (v8047(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8045(VarCurr)<->v202(VarCurr,bitIndex3)).
% 297.09/295.32  all VarCurr (v202(VarCurr,bitIndex3)<->v204(VarCurr,bitIndex3)).
% 297.09/295.32  all VarCurr (v204(VarCurr,bitIndex3)<->v546(VarCurr,bitIndex3)).
% 297.09/295.32  all VarCurr (v8043(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8041(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8022(VarNext)-> (v8011(VarNext)<->v8011(VarCurr)))).
% 297.09/295.32  all VarNext (v8022(VarNext)-> (v8011(VarNext)<->v8032(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8032(VarNext)<->v8030(VarCurr))).
% 297.09/295.32  all VarCurr (-v8033(VarCurr)-> (v8030(VarCurr)<->x552(VarCurr))).
% 297.09/295.32  all VarCurr (v8033(VarCurr)-> (v8030(VarCurr)<->v8017(VarCurr))).
% 297.09/295.32  all VarCurr (v8033(VarCurr)<->v8034(VarCurr)&v8035(VarCurr)).
% 297.09/295.32  all VarCurr (-v8035(VarCurr)<->v8015(VarCurr)).
% 297.09/295.32  all VarCurr (-v8034(VarCurr)<->v8013(VarCurr)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8022(VarNext)<->v8023(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8023(VarNext)<->v8024(VarNext)&v8019(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v8024(VarNext)<->v8026(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8026(VarNext)<->v8019(VarCurr))).
% 297.09/295.32  v8011(constB0)<->$F.
% 297.09/295.32  all VarCurr (v8019(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8017(VarCurr)<->v202(VarCurr,bitIndex4)).
% 297.09/295.32  all VarCurr (v202(VarCurr,bitIndex4)<->v204(VarCurr,bitIndex4)).
% 297.09/295.32  all VarCurr (v204(VarCurr,bitIndex4)<->v546(VarCurr,bitIndex4)).
% 297.09/295.32  all VarCurr (v8015(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v8013(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7994(VarNext)-> (v7983(VarNext)<->v7983(VarCurr)))).
% 297.09/295.32  all VarNext (v7994(VarNext)-> (v7983(VarNext)<->v8004(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v8004(VarNext)<->v8002(VarCurr))).
% 297.09/295.32  all VarCurr (-v8005(VarCurr)-> (v8002(VarCurr)<->x552(VarCurr))).
% 297.09/295.32  all VarCurr (v8005(VarCurr)-> (v8002(VarCurr)<->v7989(VarCurr))).
% 297.09/295.32  all VarCurr (v8005(VarCurr)<->v8006(VarCurr)&v8007(VarCurr)).
% 297.09/295.32  all VarCurr (-v8007(VarCurr)<->v7987(VarCurr)).
% 297.09/295.32  all VarCurr (-v8006(VarCurr)<->v7985(VarCurr)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7994(VarNext)<->v7995(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7995(VarNext)<->v7996(VarNext)&v7991(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7996(VarNext)<->v7998(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7998(VarNext)<->v7991(VarCurr))).
% 297.09/295.32  v7983(constB0)<->$F.
% 297.09/295.32  all VarCurr (v7991(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v7989(VarCurr)<->v202(VarCurr,bitIndex5)).
% 297.09/295.32  all VarCurr (v202(VarCurr,bitIndex5)<->v204(VarCurr,bitIndex5)).
% 297.09/295.32  all VarCurr (v204(VarCurr,bitIndex5)<->v546(VarCurr,bitIndex5)).
% 297.09/295.32  all VarCurr (v7987(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v7985(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7966(VarNext)-> (v7955(VarNext)<->v7955(VarCurr)))).
% 297.09/295.32  all VarNext (v7966(VarNext)-> (v7955(VarNext)<->v7976(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7976(VarNext)<->v7974(VarCurr))).
% 297.09/295.32  all VarCurr (-v7977(VarCurr)-> (v7974(VarCurr)<->x552(VarCurr))).
% 297.09/295.32  all VarCurr (v7977(VarCurr)-> (v7974(VarCurr)<->v7961(VarCurr))).
% 297.09/295.32  all VarCurr (v7977(VarCurr)<->v7978(VarCurr)&v7979(VarCurr)).
% 297.09/295.32  all VarCurr (-v7979(VarCurr)<->v7959(VarCurr)).
% 297.09/295.32  all VarCurr (-v7978(VarCurr)<->v7957(VarCurr)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7966(VarNext)<->v7967(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7967(VarNext)<->v7968(VarNext)&v7963(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7968(VarNext)<->v7970(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7970(VarNext)<->v7963(VarCurr))).
% 297.09/295.32  v7955(constB0)<->$F.
% 297.09/295.32  all VarCurr (v7963(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v7961(VarCurr)<->v202(VarCurr,bitIndex6)).
% 297.09/295.32  all VarCurr (v202(VarCurr,bitIndex6)<->v204(VarCurr,bitIndex6)).
% 297.09/295.32  all VarCurr (v204(VarCurr,bitIndex6)<->v546(VarCurr,bitIndex6)).
% 297.09/295.32  all VarCurr (v7959(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v7957(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7938(VarNext)-> (v7927(VarNext)<->v7927(VarCurr)))).
% 297.09/295.32  all VarNext (v7938(VarNext)-> (v7927(VarNext)<->v7948(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7948(VarNext)<->v7946(VarCurr))).
% 297.09/295.32  all VarCurr (-v7949(VarCurr)-> (v7946(VarCurr)<->x552(VarCurr))).
% 297.09/295.32  all VarCurr (v7949(VarCurr)-> (v7946(VarCurr)<->v7933(VarCurr))).
% 297.09/295.32  all VarCurr (v7949(VarCurr)<->v7950(VarCurr)&v7951(VarCurr)).
% 297.09/295.32  all VarCurr (-v7951(VarCurr)<->v7931(VarCurr)).
% 297.09/295.32  all VarCurr (-v7950(VarCurr)<->v7929(VarCurr)).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7938(VarNext)<->v7939(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7939(VarNext)<->v7940(VarNext)&v7935(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7940(VarNext)<->v7942(VarNext))).
% 297.09/295.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7942(VarNext)<->v7935(VarCurr))).
% 297.09/295.32  v7927(constB0)<->$F.
% 297.09/295.32  all VarCurr (v7935(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.32  all VarCurr (v7933(VarCurr)<->v202(VarCurr,bitIndex7)).
% 297.09/295.33  all VarCurr (v202(VarCurr,bitIndex7)<->v204(VarCurr,bitIndex7)).
% 297.09/295.33  all VarCurr (v204(VarCurr,bitIndex7)<->v546(VarCurr,bitIndex7)).
% 297.09/295.33  all VarCurr (v7931(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.33  all VarCurr (v7929(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7910(VarNext)-> (v7899(VarNext)<->v7899(VarCurr)))).
% 297.09/295.33  all VarNext (v7910(VarNext)-> (v7899(VarNext)<->v7920(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7920(VarNext)<->v7918(VarCurr))).
% 297.09/295.33  all VarCurr (-v7921(VarCurr)-> (v7918(VarCurr)<->x552(VarCurr))).
% 297.09/295.33  all VarCurr (v7921(VarCurr)-> (v7918(VarCurr)<->v7905(VarCurr))).
% 297.09/295.33  all VarCurr (v7921(VarCurr)<->v7922(VarCurr)&v7923(VarCurr)).
% 297.09/295.33  all VarCurr (-v7923(VarCurr)<->v7903(VarCurr)).
% 297.09/295.33  all VarCurr (-v7922(VarCurr)<->v7901(VarCurr)).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7910(VarNext)<->v7911(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7911(VarNext)<->v7912(VarNext)&v7907(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7912(VarNext)<->v7914(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7914(VarNext)<->v7907(VarCurr))).
% 297.09/295.33  v7899(constB0)<->$F.
% 297.09/295.33  all VarCurr (v7907(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.33  all VarCurr (v7905(VarCurr)<->v202(VarCurr,bitIndex8)).
% 297.09/295.33  all VarCurr (v202(VarCurr,bitIndex8)<->v204(VarCurr,bitIndex8)).
% 297.09/295.33  all VarCurr (v204(VarCurr,bitIndex8)<->v546(VarCurr,bitIndex8)).
% 297.09/295.33  all VarCurr (v7903(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.33  all VarCurr (v7901(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7882(VarNext)-> (v7871(VarNext)<->v7871(VarCurr)))).
% 297.09/295.33  all VarNext (v7882(VarNext)-> (v7871(VarNext)<->v7892(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7892(VarNext)<->v7890(VarCurr))).
% 297.09/295.33  all VarCurr (-v7893(VarCurr)-> (v7890(VarCurr)<->x552(VarCurr))).
% 297.09/295.33  all VarCurr (v7893(VarCurr)-> (v7890(VarCurr)<->v7877(VarCurr))).
% 297.09/295.33  all VarCurr (v7893(VarCurr)<->v7894(VarCurr)&v7895(VarCurr)).
% 297.09/295.33  all VarCurr (-v7895(VarCurr)<->v7875(VarCurr)).
% 297.09/295.33  all VarCurr (-v7894(VarCurr)<->v7873(VarCurr)).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7882(VarNext)<->v7883(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7883(VarNext)<->v7884(VarNext)&v7879(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7884(VarNext)<->v7886(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7886(VarNext)<->v7879(VarCurr))).
% 297.09/295.33  v7871(constB0)<->$F.
% 297.09/295.33  all VarCurr (v7879(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.33  all VarCurr (v7877(VarCurr)<->v202(VarCurr,bitIndex9)).
% 297.09/295.33  all VarCurr (v202(VarCurr,bitIndex9)<->v204(VarCurr,bitIndex9)).
% 297.09/295.33  all VarCurr (v204(VarCurr,bitIndex9)<->v546(VarCurr,bitIndex9)).
% 297.09/295.33  all VarCurr (v7875(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.33  all VarCurr (v7873(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7854(VarNext)-> (v7843(VarNext)<->v7843(VarCurr)))).
% 297.09/295.33  all VarNext (v7854(VarNext)-> (v7843(VarNext)<->v7864(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7864(VarNext)<->v7862(VarCurr))).
% 297.09/295.33  all VarCurr (-v7865(VarCurr)-> (v7862(VarCurr)<->x552(VarCurr))).
% 297.09/295.33  all VarCurr (v7865(VarCurr)-> (v7862(VarCurr)<->v7849(VarCurr))).
% 297.09/295.33  all VarCurr (v7865(VarCurr)<->v7866(VarCurr)&v7867(VarCurr)).
% 297.09/295.33  all VarCurr (-v7867(VarCurr)<->v7847(VarCurr)).
% 297.09/295.33  all VarCurr (-v7866(VarCurr)<->v7845(VarCurr)).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7854(VarNext)<->v7855(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7855(VarNext)<->v7856(VarNext)&v7851(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7856(VarNext)<->v7858(VarNext))).
% 297.09/295.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7858(VarNext)<->v7851(VarCurr))).
% 297.09/295.33  v7843(constB0)<->$F.
% 297.09/295.33  all VarCurr (v7851(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.33  all VarCurr (v7849(VarCurr)<->v202(VarCurr,bitIndex10)).
% 297.09/295.33  all VarCurr (v202(VarCurr,bitIndex10)<->v204(VarCurr,bitIndex10)).
% 297.09/295.33  all VarCurr (v204(VarCurr,bitIndex10)<->v546(VarCurr,bitIndex10)).
% 297.09/295.33  all VarCurr (v7847(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.33  all VarCurr (v7845(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7826(VarNext)-> (v7815(VarNext)<->v7815(VarCurr)))).
% 297.09/295.34  all VarNext (v7826(VarNext)-> (v7815(VarNext)<->v7836(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7836(VarNext)<->v7834(VarCurr))).
% 297.09/295.34  all VarCurr (-v7837(VarCurr)-> (v7834(VarCurr)<->x552(VarCurr))).
% 297.09/295.34  all VarCurr (v7837(VarCurr)-> (v7834(VarCurr)<->v7821(VarCurr))).
% 297.09/295.34  all VarCurr (v7837(VarCurr)<->v7838(VarCurr)&v7839(VarCurr)).
% 297.09/295.34  all VarCurr (-v7839(VarCurr)<->v7819(VarCurr)).
% 297.09/295.34  all VarCurr (-v7838(VarCurr)<->v7817(VarCurr)).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7826(VarNext)<->v7827(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7827(VarNext)<->v7828(VarNext)&v7823(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7828(VarNext)<->v7830(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7830(VarNext)<->v7823(VarCurr))).
% 297.09/295.34  v7815(constB0)<->$F.
% 297.09/295.34  all VarCurr (v7823(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.34  all VarCurr (v7821(VarCurr)<->v202(VarCurr,bitIndex11)).
% 297.09/295.34  all VarCurr (v202(VarCurr,bitIndex11)<->v204(VarCurr,bitIndex11)).
% 297.09/295.34  all VarCurr (v204(VarCurr,bitIndex11)<->v546(VarCurr,bitIndex11)).
% 297.09/295.34  all VarCurr (v7819(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.34  all VarCurr (v7817(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7798(VarNext)-> (v7787(VarNext)<->v7787(VarCurr)))).
% 297.09/295.34  all VarNext (v7798(VarNext)-> (v7787(VarNext)<->v7808(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7808(VarNext)<->v7806(VarCurr))).
% 297.09/295.34  all VarCurr (-v7809(VarCurr)-> (v7806(VarCurr)<->x552(VarCurr))).
% 297.09/295.34  all VarCurr (v7809(VarCurr)-> (v7806(VarCurr)<->v7793(VarCurr))).
% 297.09/295.34  all VarCurr (v7809(VarCurr)<->v7810(VarCurr)&v7811(VarCurr)).
% 297.09/295.34  all VarCurr (-v7811(VarCurr)<->v7791(VarCurr)).
% 297.09/295.34  all VarCurr (-v7810(VarCurr)<->v7789(VarCurr)).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7798(VarNext)<->v7799(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7799(VarNext)<->v7800(VarNext)&v7795(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7800(VarNext)<->v7802(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7802(VarNext)<->v7795(VarCurr))).
% 297.09/295.34  v7787(constB0)<->$F.
% 297.09/295.34  all VarCurr (v7795(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.34  all VarCurr (v7793(VarCurr)<->v202(VarCurr,bitIndex12)).
% 297.09/295.34  all VarCurr (v202(VarCurr,bitIndex12)<->v204(VarCurr,bitIndex12)).
% 297.09/295.34  all VarCurr (v204(VarCurr,bitIndex12)<->v546(VarCurr,bitIndex12)).
% 297.09/295.34  all VarCurr (v7791(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.34  all VarCurr (v7789(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7770(VarNext)-> (v7759(VarNext)<->v7759(VarCurr)))).
% 297.09/295.34  all VarNext (v7770(VarNext)-> (v7759(VarNext)<->v7780(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7780(VarNext)<->v7778(VarCurr))).
% 297.09/295.34  all VarCurr (-v7781(VarCurr)-> (v7778(VarCurr)<->x552(VarCurr))).
% 297.09/295.34  all VarCurr (v7781(VarCurr)-> (v7778(VarCurr)<->v7765(VarCurr))).
% 297.09/295.34  all VarCurr (v7781(VarCurr)<->v7782(VarCurr)&v7783(VarCurr)).
% 297.09/295.34  all VarCurr (-v7783(VarCurr)<->v7763(VarCurr)).
% 297.09/295.34  all VarCurr (-v7782(VarCurr)<->v7761(VarCurr)).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7770(VarNext)<->v7771(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7771(VarNext)<->v7772(VarNext)&v7767(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7772(VarNext)<->v7774(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7774(VarNext)<->v7767(VarCurr))).
% 297.09/295.34  v7759(constB0)<->$F.
% 297.09/295.34  all VarCurr (v7767(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.09/295.34  all VarCurr (v7765(VarCurr)<->v202(VarCurr,bitIndex13)).
% 297.09/295.34  all VarCurr (v202(VarCurr,bitIndex13)<->v204(VarCurr,bitIndex13)).
% 297.09/295.34  all VarCurr (v204(VarCurr,bitIndex13)<->v546(VarCurr,bitIndex13)).
% 297.09/295.34  all VarCurr (v7763(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.09/295.34  all VarCurr (v7761(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7742(VarNext)-> (v7731(VarNext)<->v7731(VarCurr)))).
% 297.09/295.34  all VarNext (v7742(VarNext)-> (v7731(VarNext)<->v7752(VarNext))).
% 297.09/295.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7752(VarNext)<->v7750(VarCurr))).
% 297.14/295.35  all VarCurr (-v7753(VarCurr)-> (v7750(VarCurr)<->x552(VarCurr))).
% 297.14/295.35  all VarCurr (v7753(VarCurr)-> (v7750(VarCurr)<->v7737(VarCurr))).
% 297.14/295.35  all VarCurr (v7753(VarCurr)<->v7754(VarCurr)&v7755(VarCurr)).
% 297.14/295.35  all VarCurr (-v7755(VarCurr)<->v7735(VarCurr)).
% 297.14/295.35  all VarCurr (-v7754(VarCurr)<->v7733(VarCurr)).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7742(VarNext)<->v7743(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7743(VarNext)<->v7744(VarNext)&v7739(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7744(VarNext)<->v7746(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7746(VarNext)<->v7739(VarCurr))).
% 297.14/295.35  v7731(constB0)<->$F.
% 297.14/295.35  all VarCurr (v7739(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.14/295.35  all VarCurr (v7737(VarCurr)<->v202(VarCurr,bitIndex14)).
% 297.14/295.35  all VarCurr (v202(VarCurr,bitIndex14)<->v204(VarCurr,bitIndex14)).
% 297.14/295.35  all VarCurr (v204(VarCurr,bitIndex14)<->v546(VarCurr,bitIndex14)).
% 297.14/295.35  all VarCurr (v7735(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.14/295.35  all VarCurr (v7733(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7714(VarNext)-> (v7703(VarNext)<->v7703(VarCurr)))).
% 297.14/295.35  all VarNext (v7714(VarNext)-> (v7703(VarNext)<->v7724(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7724(VarNext)<->v7722(VarCurr))).
% 297.14/295.35  all VarCurr (-v7725(VarCurr)-> (v7722(VarCurr)<->x552(VarCurr))).
% 297.14/295.35  all VarCurr (v7725(VarCurr)-> (v7722(VarCurr)<->v7709(VarCurr))).
% 297.14/295.35  all VarCurr (v7725(VarCurr)<->v7726(VarCurr)&v7727(VarCurr)).
% 297.14/295.35  all VarCurr (-v7727(VarCurr)<->v7707(VarCurr)).
% 297.14/295.35  all VarCurr (-v7726(VarCurr)<->v7705(VarCurr)).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7714(VarNext)<->v7715(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7715(VarNext)<->v7716(VarNext)&v7711(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7716(VarNext)<->v7718(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7718(VarNext)<->v7711(VarCurr))).
% 297.14/295.35  v7703(constB0)<->$F.
% 297.14/295.35  all VarCurr (v7711(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.14/295.35  all VarCurr (v7709(VarCurr)<->v202(VarCurr,bitIndex15)).
% 297.14/295.35  all VarCurr (v202(VarCurr,bitIndex15)<->v204(VarCurr,bitIndex15)).
% 297.14/295.35  all VarCurr (v204(VarCurr,bitIndex15)<->v546(VarCurr,bitIndex15)).
% 297.14/295.35  all VarCurr (v7707(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.14/295.35  all VarCurr (v7705(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7686(VarNext)-> (v7675(VarNext)<->v7675(VarCurr)))).
% 297.14/295.35  all VarNext (v7686(VarNext)-> (v7675(VarNext)<->v7696(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7696(VarNext)<->v7694(VarCurr))).
% 297.14/295.35  all VarCurr (-v7697(VarCurr)-> (v7694(VarCurr)<->x552(VarCurr))).
% 297.14/295.35  all VarCurr (v7697(VarCurr)-> (v7694(VarCurr)<->v7681(VarCurr))).
% 297.14/295.35  all VarCurr (v7697(VarCurr)<->v7698(VarCurr)&v7699(VarCurr)).
% 297.14/295.35  all VarCurr (-v7699(VarCurr)<->v7679(VarCurr)).
% 297.14/295.35  all VarCurr (-v7698(VarCurr)<->v7677(VarCurr)).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7686(VarNext)<->v7687(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7687(VarNext)<->v7688(VarNext)&v7683(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7688(VarNext)<->v7690(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7690(VarNext)<->v7683(VarCurr))).
% 297.14/295.35  v7675(constB0)<->$F.
% 297.14/295.35  all VarCurr (v7683(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.14/295.35  all VarCurr (v7681(VarCurr)<->v202(VarCurr,bitIndex16)).
% 297.14/295.35  all VarCurr (v202(VarCurr,bitIndex16)<->v204(VarCurr,bitIndex16)).
% 297.14/295.35  all VarCurr (v204(VarCurr,bitIndex16)<->v546(VarCurr,bitIndex16)).
% 297.14/295.35  all VarCurr (v7679(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.14/295.35  all VarCurr (v7677(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7658(VarNext)-> (v7647(VarNext)<->v7647(VarCurr)))).
% 297.14/295.35  all VarNext (v7658(VarNext)-> (v7647(VarNext)<->v7668(VarNext))).
% 297.14/295.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7668(VarNext)<->v7666(VarCurr))).
% 297.14/295.35  all VarCurr (-v7669(VarCurr)-> (v7666(VarCurr)<->x552(VarCurr))).
% 297.14/295.35  all VarCurr (v7669(VarCurr)-> (v7666(VarCurr)<->v7653(VarCurr))).
% 297.14/295.35  all VarCurr (v7669(VarCurr)<->v7670(VarCurr)&v7671(VarCurr)).
% 297.15/295.36  all VarCurr (-v7671(VarCurr)<->v7651(VarCurr)).
% 297.15/295.36  all VarCurr (-v7670(VarCurr)<->v7649(VarCurr)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7658(VarNext)<->v7659(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7659(VarNext)<->v7660(VarNext)&v7655(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7660(VarNext)<->v7662(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7662(VarNext)<->v7655(VarCurr))).
% 297.15/295.36  v7647(constB0)<->$F.
% 297.15/295.36  all VarCurr (v7655(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7653(VarCurr)<->v202(VarCurr,bitIndex17)).
% 297.15/295.36  all VarCurr (v202(VarCurr,bitIndex17)<->v204(VarCurr,bitIndex17)).
% 297.15/295.36  all VarCurr (v204(VarCurr,bitIndex17)<->v546(VarCurr,bitIndex17)).
% 297.15/295.36  all VarCurr (v7651(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7649(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7630(VarNext)-> (v7619(VarNext)<->v7619(VarCurr)))).
% 297.15/295.36  all VarNext (v7630(VarNext)-> (v7619(VarNext)<->v7640(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7640(VarNext)<->v7638(VarCurr))).
% 297.15/295.36  all VarCurr (-v7641(VarCurr)-> (v7638(VarCurr)<->x552(VarCurr))).
% 297.15/295.36  all VarCurr (v7641(VarCurr)-> (v7638(VarCurr)<->v7625(VarCurr))).
% 297.15/295.36  all VarCurr (v7641(VarCurr)<->v7642(VarCurr)&v7643(VarCurr)).
% 297.15/295.36  all VarCurr (-v7643(VarCurr)<->v7623(VarCurr)).
% 297.15/295.36  all VarCurr (-v7642(VarCurr)<->v7621(VarCurr)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7630(VarNext)<->v7631(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7631(VarNext)<->v7632(VarNext)&v7627(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7632(VarNext)<->v7634(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7634(VarNext)<->v7627(VarCurr))).
% 297.15/295.36  v7619(constB0)<->$F.
% 297.15/295.36  all VarCurr (v7627(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7625(VarCurr)<->v202(VarCurr,bitIndex18)).
% 297.15/295.36  all VarCurr (v202(VarCurr,bitIndex18)<->v204(VarCurr,bitIndex18)).
% 297.15/295.36  all VarCurr (v204(VarCurr,bitIndex18)<->v546(VarCurr,bitIndex18)).
% 297.15/295.36  all VarCurr (v7623(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7621(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7602(VarNext)-> (v7591(VarNext)<->v7591(VarCurr)))).
% 297.15/295.36  all VarNext (v7602(VarNext)-> (v7591(VarNext)<->v7612(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7612(VarNext)<->v7610(VarCurr))).
% 297.15/295.36  all VarCurr (-v7613(VarCurr)-> (v7610(VarCurr)<->x552(VarCurr))).
% 297.15/295.36  all VarCurr (v7613(VarCurr)-> (v7610(VarCurr)<->v7597(VarCurr))).
% 297.15/295.36  all VarCurr (v7613(VarCurr)<->v7614(VarCurr)&v7615(VarCurr)).
% 297.15/295.36  all VarCurr (-v7615(VarCurr)<->v7595(VarCurr)).
% 297.15/295.36  all VarCurr (-v7614(VarCurr)<->v7593(VarCurr)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7602(VarNext)<->v7603(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7603(VarNext)<->v7604(VarNext)&v7599(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7604(VarNext)<->v7606(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7606(VarNext)<->v7599(VarCurr))).
% 297.15/295.36  v7591(constB0)<->$F.
% 297.15/295.36  all VarCurr (v7599(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7597(VarCurr)<->v202(VarCurr,bitIndex19)).
% 297.15/295.36  all VarCurr (v202(VarCurr,bitIndex19)<->v204(VarCurr,bitIndex19)).
% 297.15/295.36  all VarCurr (v204(VarCurr,bitIndex19)<->v546(VarCurr,bitIndex19)).
% 297.15/295.36  all VarCurr (v7595(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7593(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7574(VarNext)-> (v7563(VarNext)<->v7563(VarCurr)))).
% 297.15/295.36  all VarNext (v7574(VarNext)-> (v7563(VarNext)<->v7584(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7584(VarNext)<->v7582(VarCurr))).
% 297.15/295.36  all VarCurr (-v7585(VarCurr)-> (v7582(VarCurr)<->x552(VarCurr))).
% 297.15/295.36  all VarCurr (v7585(VarCurr)-> (v7582(VarCurr)<->v7569(VarCurr))).
% 297.15/295.36  all VarCurr (v7585(VarCurr)<->v7586(VarCurr)&v7587(VarCurr)).
% 297.15/295.36  all VarCurr (-v7587(VarCurr)<->v7567(VarCurr)).
% 297.15/295.36  all VarCurr (-v7586(VarCurr)<->v7565(VarCurr)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7574(VarNext)<->v7575(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7575(VarNext)<->v7576(VarNext)&v7571(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7576(VarNext)<->v7578(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7578(VarNext)<->v7571(VarCurr))).
% 297.15/295.36  v7563(constB0)<->$F.
% 297.15/295.36  all VarCurr (v7571(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7569(VarCurr)<->v202(VarCurr,bitIndex20)).
% 297.15/295.36  all VarCurr (v202(VarCurr,bitIndex20)<->v204(VarCurr,bitIndex20)).
% 297.15/295.36  all VarCurr (v204(VarCurr,bitIndex20)<->v546(VarCurr,bitIndex20)).
% 297.15/295.36  all VarCurr (v7567(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7565(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7546(VarNext)-> (v7535(VarNext)<->v7535(VarCurr)))).
% 297.15/295.36  all VarNext (v7546(VarNext)-> (v7535(VarNext)<->v7556(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7556(VarNext)<->v7554(VarCurr))).
% 297.15/295.36  all VarCurr (-v7557(VarCurr)-> (v7554(VarCurr)<->x552(VarCurr))).
% 297.15/295.36  all VarCurr (v7557(VarCurr)-> (v7554(VarCurr)<->v7541(VarCurr))).
% 297.15/295.36  all VarCurr (v7557(VarCurr)<->v7558(VarCurr)&v7559(VarCurr)).
% 297.15/295.36  all VarCurr (-v7559(VarCurr)<->v7539(VarCurr)).
% 297.15/295.36  all VarCurr (-v7558(VarCurr)<->v7537(VarCurr)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7546(VarNext)<->v7547(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7547(VarNext)<->v7548(VarNext)&v7543(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7548(VarNext)<->v7550(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7550(VarNext)<->v7543(VarCurr))).
% 297.15/295.36  v7535(constB0)<->$F.
% 297.15/295.36  all VarCurr (v7543(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7541(VarCurr)<->v202(VarCurr,bitIndex21)).
% 297.15/295.36  all VarCurr (v202(VarCurr,bitIndex21)<->v204(VarCurr,bitIndex21)).
% 297.15/295.36  all VarCurr (v204(VarCurr,bitIndex21)<->v546(VarCurr,bitIndex21)).
% 297.15/295.36  all VarCurr (v7539(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7537(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7518(VarNext)-> (v7507(VarNext)<->v7507(VarCurr)))).
% 297.15/295.36  all VarNext (v7518(VarNext)-> (v7507(VarNext)<->v7528(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7528(VarNext)<->v7526(VarCurr))).
% 297.15/295.36  all VarCurr (-v7529(VarCurr)-> (v7526(VarCurr)<->x552(VarCurr))).
% 297.15/295.36  all VarCurr (v7529(VarCurr)-> (v7526(VarCurr)<->v7513(VarCurr))).
% 297.15/295.36  all VarCurr (v7529(VarCurr)<->v7530(VarCurr)&v7531(VarCurr)).
% 297.15/295.36  all VarCurr (-v7531(VarCurr)<->v7511(VarCurr)).
% 297.15/295.36  all VarCurr (-v7530(VarCurr)<->v7509(VarCurr)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7518(VarNext)<->v7519(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7519(VarNext)<->v7520(VarNext)&v7515(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7520(VarNext)<->v7522(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7522(VarNext)<->v7515(VarCurr))).
% 297.15/295.36  v7507(constB0)<->$F.
% 297.15/295.36  all VarCurr (v7515(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7513(VarCurr)<->v202(VarCurr,bitIndex22)).
% 297.15/295.36  all VarCurr (v202(VarCurr,bitIndex22)<->v204(VarCurr,bitIndex22)).
% 297.15/295.36  all VarCurr (v204(VarCurr,bitIndex22)<->v546(VarCurr,bitIndex22)).
% 297.15/295.36  all VarCurr (v7511(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.36  all VarCurr (v7509(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7490(VarNext)-> (v7479(VarNext)<->v7479(VarCurr)))).
% 297.15/295.36  all VarNext (v7490(VarNext)-> (v7479(VarNext)<->v7500(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7500(VarNext)<->v7498(VarCurr))).
% 297.15/295.36  all VarCurr (-v7501(VarCurr)-> (v7498(VarCurr)<->x552(VarCurr))).
% 297.15/295.36  all VarCurr (v7501(VarCurr)-> (v7498(VarCurr)<->v7485(VarCurr))).
% 297.15/295.36  all VarCurr (v7501(VarCurr)<->v7502(VarCurr)&v7503(VarCurr)).
% 297.15/295.36  all VarCurr (-v7503(VarCurr)<->v7483(VarCurr)).
% 297.15/295.36  all VarCurr (-v7502(VarCurr)<->v7481(VarCurr)).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7490(VarNext)<->v7491(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7491(VarNext)<->v7492(VarNext)&v7487(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7492(VarNext)<->v7494(VarNext))).
% 297.15/295.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7494(VarNext)<->v7487(VarCurr))).
% 297.15/295.36  v7479(constB0)<->$F.
% 297.15/295.37  all VarCurr (v7487(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.37  all VarCurr (v7485(VarCurr)<->v202(VarCurr,bitIndex23)).
% 297.15/295.37  all VarCurr (v202(VarCurr,bitIndex23)<->v204(VarCurr,bitIndex23)).
% 297.15/295.37  all VarCurr (v204(VarCurr,bitIndex23)<->v546(VarCurr,bitIndex23)).
% 297.15/295.37  all VarCurr (v7483(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.37  all VarCurr (v7481(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7462(VarNext)-> (v7451(VarNext)<->v7451(VarCurr)))).
% 297.15/295.37  all VarNext (v7462(VarNext)-> (v7451(VarNext)<->v7472(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7472(VarNext)<->v7470(VarCurr))).
% 297.15/295.37  all VarCurr (-v7473(VarCurr)-> (v7470(VarCurr)<->x552(VarCurr))).
% 297.15/295.37  all VarCurr (v7473(VarCurr)-> (v7470(VarCurr)<->v7457(VarCurr))).
% 297.15/295.37  all VarCurr (v7473(VarCurr)<->v7474(VarCurr)&v7475(VarCurr)).
% 297.15/295.37  all VarCurr (-v7475(VarCurr)<->v7455(VarCurr)).
% 297.15/295.37  all VarCurr (-v7474(VarCurr)<->v7453(VarCurr)).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7462(VarNext)<->v7463(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7463(VarNext)<->v7464(VarNext)&v7459(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7464(VarNext)<->v7466(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7466(VarNext)<->v7459(VarCurr))).
% 297.15/295.37  v7451(constB0)<->$F.
% 297.15/295.37  all VarCurr (v7459(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.37  all VarCurr (v7457(VarCurr)<->v202(VarCurr,bitIndex24)).
% 297.15/295.37  all VarCurr (v202(VarCurr,bitIndex24)<->v204(VarCurr,bitIndex24)).
% 297.15/295.37  all VarCurr (v204(VarCurr,bitIndex24)<->v546(VarCurr,bitIndex24)).
% 297.15/295.37  all VarCurr (v7455(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.37  all VarCurr (v7453(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7434(VarNext)-> (v7423(VarNext)<->v7423(VarCurr)))).
% 297.15/295.37  all VarNext (v7434(VarNext)-> (v7423(VarNext)<->v7444(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7444(VarNext)<->v7442(VarCurr))).
% 297.15/295.37  all VarCurr (-v7445(VarCurr)-> (v7442(VarCurr)<->x552(VarCurr))).
% 297.15/295.37  all VarCurr (v7445(VarCurr)-> (v7442(VarCurr)<->v7429(VarCurr))).
% 297.15/295.37  all VarCurr (v7445(VarCurr)<->v7446(VarCurr)&v7447(VarCurr)).
% 297.15/295.37  all VarCurr (-v7447(VarCurr)<->v7427(VarCurr)).
% 297.15/295.37  all VarCurr (-v7446(VarCurr)<->v7425(VarCurr)).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7434(VarNext)<->v7435(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7435(VarNext)<->v7436(VarNext)&v7431(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7436(VarNext)<->v7438(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7438(VarNext)<->v7431(VarCurr))).
% 297.15/295.37  v7423(constB0)<->$F.
% 297.15/295.37  all VarCurr (v7431(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.37  all VarCurr (v7429(VarCurr)<->v202(VarCurr,bitIndex25)).
% 297.15/295.37  all VarCurr (v202(VarCurr,bitIndex25)<->v204(VarCurr,bitIndex25)).
% 297.15/295.37  all VarCurr (v204(VarCurr,bitIndex25)<->v546(VarCurr,bitIndex25)).
% 297.15/295.37  all VarCurr (v7427(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.37  all VarCurr (v7425(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7406(VarNext)-> (v7395(VarNext)<->v7395(VarCurr)))).
% 297.15/295.37  all VarNext (v7406(VarNext)-> (v7395(VarNext)<->v7416(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7416(VarNext)<->v7414(VarCurr))).
% 297.15/295.37  all VarCurr (-v7417(VarCurr)-> (v7414(VarCurr)<->x552(VarCurr))).
% 297.15/295.37  all VarCurr (v7417(VarCurr)-> (v7414(VarCurr)<->v7401(VarCurr))).
% 297.15/295.37  all VarCurr (v7417(VarCurr)<->v7418(VarCurr)&v7419(VarCurr)).
% 297.15/295.37  all VarCurr (-v7419(VarCurr)<->v7399(VarCurr)).
% 297.15/295.37  all VarCurr (-v7418(VarCurr)<->v7397(VarCurr)).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7406(VarNext)<->v7407(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7407(VarNext)<->v7408(VarNext)&v7403(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7408(VarNext)<->v7410(VarNext))).
% 297.15/295.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7410(VarNext)<->v7403(VarCurr))).
% 297.15/295.37  v7395(constB0)<->$F.
% 297.15/295.37  all VarCurr (v7403(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.37  all VarCurr (v7401(VarCurr)<->v202(VarCurr,bitIndex26)).
% 297.15/295.37  all VarCurr (v202(VarCurr,bitIndex26)<->v204(VarCurr,bitIndex26)).
% 297.15/295.37  all VarCurr (v204(VarCurr,bitIndex26)<->v546(VarCurr,bitIndex26)).
% 297.15/295.38  all VarCurr (v7399(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.38  all VarCurr (v7397(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7378(VarNext)-> (v7367(VarNext)<->v7367(VarCurr)))).
% 297.15/295.38  all VarNext (v7378(VarNext)-> (v7367(VarNext)<->v7388(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7388(VarNext)<->v7386(VarCurr))).
% 297.15/295.38  all VarCurr (-v7389(VarCurr)-> (v7386(VarCurr)<->x552(VarCurr))).
% 297.15/295.38  all VarCurr (v7389(VarCurr)-> (v7386(VarCurr)<->v7373(VarCurr))).
% 297.15/295.38  all VarCurr (v7389(VarCurr)<->v7390(VarCurr)&v7391(VarCurr)).
% 297.15/295.38  all VarCurr (-v7391(VarCurr)<->v7371(VarCurr)).
% 297.15/295.38  all VarCurr (-v7390(VarCurr)<->v7369(VarCurr)).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7378(VarNext)<->v7379(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7379(VarNext)<->v7380(VarNext)&v7375(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7380(VarNext)<->v7382(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7382(VarNext)<->v7375(VarCurr))).
% 297.15/295.38  v7367(constB0)<->$F.
% 297.15/295.38  all VarCurr (v7375(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.38  all VarCurr (v7373(VarCurr)<->v202(VarCurr,bitIndex27)).
% 297.15/295.38  all VarCurr (v202(VarCurr,bitIndex27)<->v204(VarCurr,bitIndex27)).
% 297.15/295.38  all VarCurr (v204(VarCurr,bitIndex27)<->v546(VarCurr,bitIndex27)).
% 297.15/295.38  all VarCurr (v7371(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.38  all VarCurr (v7369(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7350(VarNext)-> (v7339(VarNext)<->v7339(VarCurr)))).
% 297.15/295.38  all VarNext (v7350(VarNext)-> (v7339(VarNext)<->v7360(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7360(VarNext)<->v7358(VarCurr))).
% 297.15/295.38  all VarCurr (-v7361(VarCurr)-> (v7358(VarCurr)<->x552(VarCurr))).
% 297.15/295.38  all VarCurr (v7361(VarCurr)-> (v7358(VarCurr)<->v7345(VarCurr))).
% 297.15/295.38  all VarCurr (v7361(VarCurr)<->v7362(VarCurr)&v7363(VarCurr)).
% 297.15/295.38  all VarCurr (-v7363(VarCurr)<->v7343(VarCurr)).
% 297.15/295.38  all VarCurr (-v7362(VarCurr)<->v7341(VarCurr)).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7350(VarNext)<->v7351(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7351(VarNext)<->v7352(VarNext)&v7347(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7352(VarNext)<->v7354(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7354(VarNext)<->v7347(VarCurr))).
% 297.15/295.38  v7339(constB0)<->$F.
% 297.15/295.38  all VarCurr (v7347(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.38  all VarCurr (v7345(VarCurr)<->v202(VarCurr,bitIndex28)).
% 297.15/295.38  all VarCurr (v202(VarCurr,bitIndex28)<->v204(VarCurr,bitIndex28)).
% 297.15/295.38  all VarCurr (v204(VarCurr,bitIndex28)<->v546(VarCurr,bitIndex28)).
% 297.15/295.38  all VarCurr (v7343(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.38  all VarCurr (v7341(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7322(VarNext)-> (v7311(VarNext)<->v7311(VarCurr)))).
% 297.15/295.38  all VarNext (v7322(VarNext)-> (v7311(VarNext)<->v7332(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7332(VarNext)<->v7330(VarCurr))).
% 297.15/295.38  all VarCurr (-v7333(VarCurr)-> (v7330(VarCurr)<->x552(VarCurr))).
% 297.15/295.38  all VarCurr (v7333(VarCurr)-> (v7330(VarCurr)<->v7317(VarCurr))).
% 297.15/295.38  all VarCurr (v7333(VarCurr)<->v7334(VarCurr)&v7335(VarCurr)).
% 297.15/295.38  all VarCurr (-v7335(VarCurr)<->v7315(VarCurr)).
% 297.15/295.38  all VarCurr (-v7334(VarCurr)<->v7313(VarCurr)).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7322(VarNext)<->v7323(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7323(VarNext)<->v7324(VarNext)&v7319(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7324(VarNext)<->v7326(VarNext))).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7326(VarNext)<->v7319(VarCurr))).
% 297.15/295.38  v7311(constB0)<->$F.
% 297.15/295.38  all VarCurr (v7319(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.38  all VarCurr (v7317(VarCurr)<->v202(VarCurr,bitIndex29)).
% 297.15/295.38  all VarCurr (v202(VarCurr,bitIndex29)<->v204(VarCurr,bitIndex29)).
% 297.15/295.38  all VarCurr (v204(VarCurr,bitIndex29)<->v546(VarCurr,bitIndex29)).
% 297.15/295.38  all VarCurr (v7315(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.38  all VarCurr (v7313(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7294(VarNext)-> (v7283(VarNext)<->v7283(VarCurr)))).
% 297.15/295.39  all VarNext (v7294(VarNext)-> (v7283(VarNext)<->v7304(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7304(VarNext)<->v7302(VarCurr))).
% 297.15/295.39  all VarCurr (-v7305(VarCurr)-> (v7302(VarCurr)<->x552(VarCurr))).
% 297.15/295.39  all VarCurr (v7305(VarCurr)-> (v7302(VarCurr)<->v7289(VarCurr))).
% 297.15/295.39  all VarCurr (v7305(VarCurr)<->v7306(VarCurr)&v7307(VarCurr)).
% 297.15/295.39  all VarCurr (-v7307(VarCurr)<->v7287(VarCurr)).
% 297.15/295.39  all VarCurr (-v7306(VarCurr)<->v7285(VarCurr)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7294(VarNext)<->v7295(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7295(VarNext)<->v7296(VarNext)&v7291(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7296(VarNext)<->v7298(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7298(VarNext)<->v7291(VarCurr))).
% 297.15/295.39  v7283(constB0)<->$F.
% 297.15/295.39  all VarCurr (v7291(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7289(VarCurr)<->v202(VarCurr,bitIndex31)).
% 297.15/295.39  all VarCurr (v202(VarCurr,bitIndex31)<->v204(VarCurr,bitIndex31)).
% 297.15/295.39  all VarCurr (v204(VarCurr,bitIndex31)<->v546(VarCurr,bitIndex31)).
% 297.15/295.39  all VarCurr (v7287(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7285(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7266(VarNext)-> (v7255(VarNext)<->v7255(VarCurr)))).
% 297.15/295.39  all VarNext (v7266(VarNext)-> (v7255(VarNext)<->v7276(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7276(VarNext)<->v7274(VarCurr))).
% 297.15/295.39  all VarCurr (-v7277(VarCurr)-> (v7274(VarCurr)<->x552(VarCurr))).
% 297.15/295.39  all VarCurr (v7277(VarCurr)-> (v7274(VarCurr)<->v7261(VarCurr))).
% 297.15/295.39  all VarCurr (v7277(VarCurr)<->v7278(VarCurr)&v7279(VarCurr)).
% 297.15/295.39  all VarCurr (-v7279(VarCurr)<->v7259(VarCurr)).
% 297.15/295.39  all VarCurr (-v7278(VarCurr)<->v7257(VarCurr)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7266(VarNext)<->v7267(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7267(VarNext)<->v7268(VarNext)&v7263(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7268(VarNext)<->v7270(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7270(VarNext)<->v7263(VarCurr))).
% 297.15/295.39  v7255(constB0)<->$F.
% 297.15/295.39  all VarCurr (v7263(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7261(VarCurr)<->v202(VarCurr,bitIndex32)).
% 297.15/295.39  all VarCurr (v202(VarCurr,bitIndex32)<->v204(VarCurr,bitIndex32)).
% 297.15/295.39  all VarCurr (v204(VarCurr,bitIndex32)<->v546(VarCurr,bitIndex32)).
% 297.15/295.39  all VarCurr (v7259(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7257(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7238(VarNext)-> (v7227(VarNext)<->v7227(VarCurr)))).
% 297.15/295.39  all VarNext (v7238(VarNext)-> (v7227(VarNext)<->v7248(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7248(VarNext)<->v7246(VarCurr))).
% 297.15/295.39  all VarCurr (-v7249(VarCurr)-> (v7246(VarCurr)<->x552(VarCurr))).
% 297.15/295.39  all VarCurr (v7249(VarCurr)-> (v7246(VarCurr)<->v7233(VarCurr))).
% 297.15/295.39  all VarCurr (v7249(VarCurr)<->v7250(VarCurr)&v7251(VarCurr)).
% 297.15/295.39  all VarCurr (-v7251(VarCurr)<->v7231(VarCurr)).
% 297.15/295.39  all VarCurr (-v7250(VarCurr)<->v7229(VarCurr)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7238(VarNext)<->v7239(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7239(VarNext)<->v7240(VarNext)&v7235(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7240(VarNext)<->v7242(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7242(VarNext)<->v7235(VarCurr))).
% 297.15/295.39  v7227(constB0)<->$F.
% 297.15/295.39  all VarCurr (v7235(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7233(VarCurr)<->v202(VarCurr,bitIndex33)).
% 297.15/295.39  all VarCurr (v202(VarCurr,bitIndex33)<->v204(VarCurr,bitIndex33)).
% 297.15/295.39  all VarCurr (v204(VarCurr,bitIndex33)<->v546(VarCurr,bitIndex33)).
% 297.15/295.39  all VarCurr (v7231(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7229(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7210(VarNext)-> (v7199(VarNext)<->v7199(VarCurr)))).
% 297.15/295.39  all VarNext (v7210(VarNext)-> (v7199(VarNext)<->v7220(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7220(VarNext)<->v7218(VarCurr))).
% 297.15/295.39  all VarCurr (-v7221(VarCurr)-> (v7218(VarCurr)<->x552(VarCurr))).
% 297.15/295.39  all VarCurr (v7221(VarCurr)-> (v7218(VarCurr)<->v7205(VarCurr))).
% 297.15/295.39  all VarCurr (v7221(VarCurr)<->v7222(VarCurr)&v7223(VarCurr)).
% 297.15/295.39  all VarCurr (-v7223(VarCurr)<->v7203(VarCurr)).
% 297.15/295.39  all VarCurr (-v7222(VarCurr)<->v7201(VarCurr)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7210(VarNext)<->v7211(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7211(VarNext)<->v7212(VarNext)&v7207(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7212(VarNext)<->v7214(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7214(VarNext)<->v7207(VarCurr))).
% 297.15/295.39  v7199(constB0)<->$F.
% 297.15/295.39  all VarCurr (v7207(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7205(VarCurr)<->v202(VarCurr,bitIndex34)).
% 297.15/295.39  all VarCurr (v202(VarCurr,bitIndex34)<->v204(VarCurr,bitIndex34)).
% 297.15/295.39  all VarCurr (v204(VarCurr,bitIndex34)<->v546(VarCurr,bitIndex34)).
% 297.15/295.39  all VarCurr (v7203(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7201(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7182(VarNext)-> (v7171(VarNext)<->v7171(VarCurr)))).
% 297.15/295.39  all VarNext (v7182(VarNext)-> (v7171(VarNext)<->v7192(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7192(VarNext)<->v7190(VarCurr))).
% 297.15/295.39  all VarCurr (-v7193(VarCurr)-> (v7190(VarCurr)<->x552(VarCurr))).
% 297.15/295.39  all VarCurr (v7193(VarCurr)-> (v7190(VarCurr)<->v7177(VarCurr))).
% 297.15/295.39  all VarCurr (v7193(VarCurr)<->v7194(VarCurr)&v7195(VarCurr)).
% 297.15/295.39  all VarCurr (-v7195(VarCurr)<->v7175(VarCurr)).
% 297.15/295.39  all VarCurr (-v7194(VarCurr)<->v7173(VarCurr)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7182(VarNext)<->v7183(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7183(VarNext)<->v7184(VarNext)&v7179(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7184(VarNext)<->v7186(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7186(VarNext)<->v7179(VarCurr))).
% 297.15/295.39  v7171(constB0)<->$F.
% 297.15/295.39  all VarCurr (v7179(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7177(VarCurr)<->v202(VarCurr,bitIndex35)).
% 297.15/295.39  all VarCurr (v202(VarCurr,bitIndex35)<->v204(VarCurr,bitIndex35)).
% 297.15/295.39  all VarCurr (v204(VarCurr,bitIndex35)<->v546(VarCurr,bitIndex35)).
% 297.15/295.39  all VarCurr (v7175(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7173(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7154(VarNext)-> (v7143(VarNext)<->v7143(VarCurr)))).
% 297.15/295.39  all VarNext (v7154(VarNext)-> (v7143(VarNext)<->v7164(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7164(VarNext)<->v7162(VarCurr))).
% 297.15/295.39  all VarCurr (-v7165(VarCurr)-> (v7162(VarCurr)<->x552(VarCurr))).
% 297.15/295.39  all VarCurr (v7165(VarCurr)-> (v7162(VarCurr)<->v7149(VarCurr))).
% 297.15/295.39  all VarCurr (v7165(VarCurr)<->v7166(VarCurr)&v7167(VarCurr)).
% 297.15/295.39  all VarCurr (-v7167(VarCurr)<->v7147(VarCurr)).
% 297.15/295.39  all VarCurr (-v7166(VarCurr)<->v7145(VarCurr)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7154(VarNext)<->v7155(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7155(VarNext)<->v7156(VarNext)&v7151(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7156(VarNext)<->v7158(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7158(VarNext)<->v7151(VarCurr))).
% 297.15/295.39  v7143(constB0)<->$F.
% 297.15/295.39  all VarCurr (v7151(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7149(VarCurr)<->v202(VarCurr,bitIndex36)).
% 297.15/295.39  all VarCurr (v202(VarCurr,bitIndex36)<->v204(VarCurr,bitIndex36)).
% 297.15/295.39  all VarCurr (v204(VarCurr,bitIndex36)<->v546(VarCurr,bitIndex36)).
% 297.15/295.39  all VarCurr (v7147(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.39  all VarCurr (v7145(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7126(VarNext)-> (v7115(VarNext)<->v7115(VarCurr)))).
% 297.15/295.39  all VarNext (v7126(VarNext)-> (v7115(VarNext)<->v7136(VarNext))).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7136(VarNext)<->v7134(VarCurr))).
% 297.15/295.39  all VarCurr (-v7137(VarCurr)-> (v7134(VarCurr)<->x552(VarCurr))).
% 297.15/295.39  all VarCurr (v7137(VarCurr)-> (v7134(VarCurr)<->v7121(VarCurr))).
% 297.15/295.39  all VarCurr (v7137(VarCurr)<->v7138(VarCurr)&v7139(VarCurr)).
% 297.15/295.39  all VarCurr (-v7139(VarCurr)<->v7119(VarCurr)).
% 297.15/295.39  all VarCurr (-v7138(VarCurr)<->v7117(VarCurr)).
% 297.15/295.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7126(VarNext)<->v7127(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7127(VarNext)<->v7128(VarNext)&v7123(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7128(VarNext)<->v7130(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7130(VarNext)<->v7123(VarCurr))).
% 297.15/295.40  v7115(constB0)<->$F.
% 297.15/295.40  all VarCurr (v7123(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.40  all VarCurr (v7121(VarCurr)<->v202(VarCurr,bitIndex37)).
% 297.15/295.40  all VarCurr (v202(VarCurr,bitIndex37)<->v204(VarCurr,bitIndex37)).
% 297.15/295.40  all VarCurr (v204(VarCurr,bitIndex37)<->v546(VarCurr,bitIndex37)).
% 297.15/295.40  all VarCurr (v7119(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.40  all VarCurr (v7117(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7098(VarNext)-> (v7087(VarNext)<->v7087(VarCurr)))).
% 297.15/295.40  all VarNext (v7098(VarNext)-> (v7087(VarNext)<->v7108(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7108(VarNext)<->v7106(VarCurr))).
% 297.15/295.40  all VarCurr (-v7109(VarCurr)-> (v7106(VarCurr)<->x552(VarCurr))).
% 297.15/295.40  all VarCurr (v7109(VarCurr)-> (v7106(VarCurr)<->v7093(VarCurr))).
% 297.15/295.40  all VarCurr (v7109(VarCurr)<->v7110(VarCurr)&v7111(VarCurr)).
% 297.15/295.40  all VarCurr (-v7111(VarCurr)<->v7091(VarCurr)).
% 297.15/295.40  all VarCurr (-v7110(VarCurr)<->v7089(VarCurr)).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7098(VarNext)<->v7099(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7099(VarNext)<->v7100(VarNext)&v7095(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7100(VarNext)<->v7102(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7102(VarNext)<->v7095(VarCurr))).
% 297.15/295.40  v7087(constB0)<->$F.
% 297.15/295.40  all VarCurr (v7095(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.40  all VarCurr (v7093(VarCurr)<->v202(VarCurr,bitIndex38)).
% 297.15/295.40  all VarCurr (v202(VarCurr,bitIndex38)<->v204(VarCurr,bitIndex38)).
% 297.15/295.40  all VarCurr (v204(VarCurr,bitIndex38)<->v546(VarCurr,bitIndex38)).
% 297.15/295.40  all VarCurr (v7091(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.40  all VarCurr (v7089(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7070(VarNext)-> (v7059(VarNext)<->v7059(VarCurr)))).
% 297.15/295.40  all VarNext (v7070(VarNext)-> (v7059(VarNext)<->v7080(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7080(VarNext)<->v7078(VarCurr))).
% 297.15/295.40  all VarCurr (-v7081(VarCurr)-> (v7078(VarCurr)<->x552(VarCurr))).
% 297.15/295.40  all VarCurr (v7081(VarCurr)-> (v7078(VarCurr)<->v7065(VarCurr))).
% 297.15/295.40  all VarCurr (v7081(VarCurr)<->v7082(VarCurr)&v7083(VarCurr)).
% 297.15/295.40  all VarCurr (-v7083(VarCurr)<->v7063(VarCurr)).
% 297.15/295.40  all VarCurr (-v7082(VarCurr)<->v7061(VarCurr)).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7070(VarNext)<->v7071(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7071(VarNext)<->v7072(VarNext)&v7067(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7072(VarNext)<->v7074(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7074(VarNext)<->v7067(VarCurr))).
% 297.15/295.40  v7059(constB0)<->$F.
% 297.15/295.40  all VarCurr (v7067(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.15/295.40  all VarCurr (v7065(VarCurr)<->v202(VarCurr,bitIndex39)).
% 297.15/295.40  all VarCurr (v202(VarCurr,bitIndex39)<->v204(VarCurr,bitIndex39)).
% 297.15/295.40  all VarCurr (v204(VarCurr,bitIndex39)<->v546(VarCurr,bitIndex39)).
% 297.15/295.40  all VarCurr (v7063(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.15/295.40  all VarCurr (v7061(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7042(VarNext)-> (v7031(VarNext)<->v7031(VarCurr)))).
% 297.15/295.40  all VarNext (v7042(VarNext)-> (v7031(VarNext)<->v7052(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7052(VarNext)<->v7050(VarCurr))).
% 297.15/295.40  all VarCurr (-v7053(VarCurr)-> (v7050(VarCurr)<->x552(VarCurr))).
% 297.15/295.40  all VarCurr (v7053(VarCurr)-> (v7050(VarCurr)<->v7037(VarCurr))).
% 297.15/295.40  all VarCurr (v7053(VarCurr)<->v7054(VarCurr)&v7055(VarCurr)).
% 297.15/295.40  all VarCurr (-v7055(VarCurr)<->v7035(VarCurr)).
% 297.15/295.40  all VarCurr (-v7054(VarCurr)<->v7033(VarCurr)).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7042(VarNext)<->v7043(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7043(VarNext)<->v7044(VarNext)&v7039(VarNext))).
% 297.15/295.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7044(VarNext)<->v7046(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7046(VarNext)<->v7039(VarCurr))).
% 297.20/295.41  v7031(constB0)<->$F.
% 297.20/295.41  all VarCurr (v7039(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.41  all VarCurr (v7037(VarCurr)<->v202(VarCurr,bitIndex40)).
% 297.20/295.41  all VarCurr (v202(VarCurr,bitIndex40)<->v204(VarCurr,bitIndex40)).
% 297.20/295.41  all VarCurr (v204(VarCurr,bitIndex40)<->v546(VarCurr,bitIndex40)).
% 297.20/295.41  all VarCurr (v7035(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.41  all VarCurr (v7033(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7014(VarNext)-> (v7003(VarNext)<->v7003(VarCurr)))).
% 297.20/295.41  all VarNext (v7014(VarNext)-> (v7003(VarNext)<->v7024(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7024(VarNext)<->v7022(VarCurr))).
% 297.20/295.41  all VarCurr (-v7025(VarCurr)-> (v7022(VarCurr)<->x552(VarCurr))).
% 297.20/295.41  all VarCurr (v7025(VarCurr)-> (v7022(VarCurr)<->v7009(VarCurr))).
% 297.20/295.41  all VarCurr (v7025(VarCurr)<->v7026(VarCurr)&v7027(VarCurr)).
% 297.20/295.41  all VarCurr (-v7027(VarCurr)<->v7007(VarCurr)).
% 297.20/295.41  all VarCurr (-v7026(VarCurr)<->v7005(VarCurr)).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7014(VarNext)<->v7015(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7015(VarNext)<->v7016(VarNext)&v7011(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v7016(VarNext)<->v7018(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v7018(VarNext)<->v7011(VarCurr))).
% 297.20/295.41  v7003(constB0)<->$F.
% 297.20/295.41  all VarCurr (v7011(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.41  all VarCurr (v7009(VarCurr)<->v202(VarCurr,bitIndex41)).
% 297.20/295.41  all VarCurr (v202(VarCurr,bitIndex41)<->v204(VarCurr,bitIndex41)).
% 297.20/295.41  all VarCurr (v204(VarCurr,bitIndex41)<->v546(VarCurr,bitIndex41)).
% 297.20/295.41  all VarCurr (v7007(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.41  all VarCurr (v7005(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6986(VarNext)-> (v6975(VarNext)<->v6975(VarCurr)))).
% 297.20/295.41  all VarNext (v6986(VarNext)-> (v6975(VarNext)<->v6996(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6996(VarNext)<->v6994(VarCurr))).
% 297.20/295.41  all VarCurr (-v6997(VarCurr)-> (v6994(VarCurr)<->x552(VarCurr))).
% 297.20/295.41  all VarCurr (v6997(VarCurr)-> (v6994(VarCurr)<->v6981(VarCurr))).
% 297.20/295.41  all VarCurr (v6997(VarCurr)<->v6998(VarCurr)&v6999(VarCurr)).
% 297.20/295.41  all VarCurr (-v6999(VarCurr)<->v6979(VarCurr)).
% 297.20/295.41  all VarCurr (-v6998(VarCurr)<->v6977(VarCurr)).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6986(VarNext)<->v6987(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6987(VarNext)<->v6988(VarNext)&v6983(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6988(VarNext)<->v6990(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6990(VarNext)<->v6983(VarCurr))).
% 297.20/295.41  v6975(constB0)<->$F.
% 297.20/295.41  all VarCurr (v6983(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.41  all VarCurr (v6981(VarCurr)<->v202(VarCurr,bitIndex42)).
% 297.20/295.41  all VarCurr (v202(VarCurr,bitIndex42)<->v204(VarCurr,bitIndex42)).
% 297.20/295.41  all VarCurr (v204(VarCurr,bitIndex42)<->v546(VarCurr,bitIndex42)).
% 297.20/295.41  all VarCurr (v6979(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.41  all VarCurr (v6977(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6958(VarNext)-> (v6947(VarNext)<->v6947(VarCurr)))).
% 297.20/295.41  all VarNext (v6958(VarNext)-> (v6947(VarNext)<->v6968(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6968(VarNext)<->v6966(VarCurr))).
% 297.20/295.41  all VarCurr (-v6969(VarCurr)-> (v6966(VarCurr)<->x552(VarCurr))).
% 297.20/295.41  all VarCurr (v6969(VarCurr)-> (v6966(VarCurr)<->v6953(VarCurr))).
% 297.20/295.41  all VarCurr (v6969(VarCurr)<->v6970(VarCurr)&v6971(VarCurr)).
% 297.20/295.41  all VarCurr (-v6971(VarCurr)<->v6951(VarCurr)).
% 297.20/295.41  all VarCurr (-v6970(VarCurr)<->v6949(VarCurr)).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6958(VarNext)<->v6959(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6959(VarNext)<->v6960(VarNext)&v6955(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6960(VarNext)<->v6962(VarNext))).
% 297.20/295.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6962(VarNext)<->v6955(VarCurr))).
% 297.20/295.41  v6947(constB0)<->$F.
% 297.20/295.41  all VarCurr (v6955(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.41  all VarCurr (v6953(VarCurr)<->v202(VarCurr,bitIndex43)).
% 297.20/295.42  all VarCurr (v202(VarCurr,bitIndex43)<->v204(VarCurr,bitIndex43)).
% 297.20/295.42  all VarCurr (v204(VarCurr,bitIndex43)<->v546(VarCurr,bitIndex43)).
% 297.20/295.42  all VarCurr (v6951(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.42  all VarCurr (v6949(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6930(VarNext)-> (v6919(VarNext)<->v6919(VarCurr)))).
% 297.20/295.42  all VarNext (v6930(VarNext)-> (v6919(VarNext)<->v6940(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6940(VarNext)<->v6938(VarCurr))).
% 297.20/295.42  all VarCurr (-v6941(VarCurr)-> (v6938(VarCurr)<->x552(VarCurr))).
% 297.20/295.42  all VarCurr (v6941(VarCurr)-> (v6938(VarCurr)<->v6925(VarCurr))).
% 297.20/295.42  all VarCurr (v6941(VarCurr)<->v6942(VarCurr)&v6943(VarCurr)).
% 297.20/295.42  all VarCurr (-v6943(VarCurr)<->v6923(VarCurr)).
% 297.20/295.42  all VarCurr (-v6942(VarCurr)<->v6921(VarCurr)).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6930(VarNext)<->v6931(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6931(VarNext)<->v6932(VarNext)&v6927(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6932(VarNext)<->v6934(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6934(VarNext)<->v6927(VarCurr))).
% 297.20/295.42  v6919(constB0)<->$F.
% 297.20/295.42  all VarCurr (v6927(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.42  all VarCurr (v6925(VarCurr)<->v202(VarCurr,bitIndex44)).
% 297.20/295.42  all VarCurr (v202(VarCurr,bitIndex44)<->v204(VarCurr,bitIndex44)).
% 297.20/295.42  all VarCurr (v204(VarCurr,bitIndex44)<->v546(VarCurr,bitIndex44)).
% 297.20/295.42  all VarCurr (v6923(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.42  all VarCurr (v6921(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6902(VarNext)-> (v6891(VarNext)<->v6891(VarCurr)))).
% 297.20/295.42  all VarNext (v6902(VarNext)-> (v6891(VarNext)<->v6912(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6912(VarNext)<->v6910(VarCurr))).
% 297.20/295.42  all VarCurr (-v6913(VarCurr)-> (v6910(VarCurr)<->x552(VarCurr))).
% 297.20/295.42  all VarCurr (v6913(VarCurr)-> (v6910(VarCurr)<->v6897(VarCurr))).
% 297.20/295.42  all VarCurr (v6913(VarCurr)<->v6914(VarCurr)&v6915(VarCurr)).
% 297.20/295.42  all VarCurr (-v6915(VarCurr)<->v6895(VarCurr)).
% 297.20/295.42  all VarCurr (-v6914(VarCurr)<->v6893(VarCurr)).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6902(VarNext)<->v6903(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6903(VarNext)<->v6904(VarNext)&v6899(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6904(VarNext)<->v6906(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6906(VarNext)<->v6899(VarCurr))).
% 297.20/295.42  v6891(constB0)<->$F.
% 297.20/295.42  all VarCurr (v6899(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.42  all VarCurr (v6897(VarCurr)<->v202(VarCurr,bitIndex45)).
% 297.20/295.42  all VarCurr (v202(VarCurr,bitIndex45)<->v204(VarCurr,bitIndex45)).
% 297.20/295.42  all VarCurr (v204(VarCurr,bitIndex45)<->v546(VarCurr,bitIndex45)).
% 297.20/295.42  all VarCurr (v6895(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.42  all VarCurr (v6893(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6874(VarNext)-> (v6863(VarNext)<->v6863(VarCurr)))).
% 297.20/295.42  all VarNext (v6874(VarNext)-> (v6863(VarNext)<->v6884(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6884(VarNext)<->v6882(VarCurr))).
% 297.20/295.42  all VarCurr (-v6885(VarCurr)-> (v6882(VarCurr)<->x552(VarCurr))).
% 297.20/295.42  all VarCurr (v6885(VarCurr)-> (v6882(VarCurr)<->v6869(VarCurr))).
% 297.20/295.42  all VarCurr (v6885(VarCurr)<->v6886(VarCurr)&v6887(VarCurr)).
% 297.20/295.42  all VarCurr (-v6887(VarCurr)<->v6867(VarCurr)).
% 297.20/295.42  all VarCurr (-v6886(VarCurr)<->v6865(VarCurr)).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6874(VarNext)<->v6875(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6875(VarNext)<->v6876(VarNext)&v6871(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6876(VarNext)<->v6878(VarNext))).
% 297.20/295.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6878(VarNext)<->v6871(VarCurr))).
% 297.20/295.42  v6863(constB0)<->$F.
% 297.20/295.42  all VarCurr (v6871(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.42  all VarCurr (v6869(VarCurr)<->v202(VarCurr,bitIndex46)).
% 297.20/295.42  all VarCurr (v202(VarCurr,bitIndex46)<->v204(VarCurr,bitIndex46)).
% 297.20/295.42  all VarCurr (v204(VarCurr,bitIndex46)<->v546(VarCurr,bitIndex46)).
% 297.20/295.42  all VarCurr (v6867(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.42  all VarCurr (v6865(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6846(VarNext)-> (v6835(VarNext)<->v6835(VarCurr)))).
% 297.20/295.43  all VarNext (v6846(VarNext)-> (v6835(VarNext)<->v6856(VarNext))).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6856(VarNext)<->v6854(VarCurr))).
% 297.20/295.43  all VarCurr (-v6857(VarCurr)-> (v6854(VarCurr)<->x552(VarCurr))).
% 297.20/295.43  all VarCurr (v6857(VarCurr)-> (v6854(VarCurr)<->v6841(VarCurr))).
% 297.20/295.43  all VarCurr (v6857(VarCurr)<->v6858(VarCurr)&v6859(VarCurr)).
% 297.20/295.43  all VarCurr (-v6859(VarCurr)<->v6839(VarCurr)).
% 297.20/295.43  all VarCurr (-v6858(VarCurr)<->v6837(VarCurr)).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6846(VarNext)<->v6847(VarNext))).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6847(VarNext)<->v6848(VarNext)&v6843(VarNext))).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6848(VarNext)<->v6850(VarNext))).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6850(VarNext)<->v6843(VarCurr))).
% 297.20/295.43  v6835(constB0)<->$F.
% 297.20/295.43  all VarCurr (v6843(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.43  all VarCurr (v6841(VarCurr)<->v202(VarCurr,bitIndex47)).
% 297.20/295.43  all VarCurr (v202(VarCurr,bitIndex47)<->v204(VarCurr,bitIndex47)).
% 297.20/295.43  all VarCurr (v204(VarCurr,bitIndex47)<->v546(VarCurr,bitIndex47)).
% 297.20/295.43  all VarCurr (v6839(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.43  all VarCurr (v6837(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6818(VarNext)-> (v6742(VarNext)<->v6742(VarCurr)))).
% 297.20/295.43  all VarNext (v6818(VarNext)-> (v6742(VarNext)<->v6828(VarNext))).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6828(VarNext)<->v6826(VarCurr))).
% 297.20/295.43  all VarCurr (-v6829(VarCurr)-> (v6826(VarCurr)<->x552(VarCurr))).
% 297.20/295.43  all VarCurr (v6829(VarCurr)-> (v6826(VarCurr)<->v6748(VarCurr))).
% 297.20/295.43  all VarCurr (v6829(VarCurr)<->v6830(VarCurr)&v6831(VarCurr)).
% 297.20/295.43  all VarCurr (-v6831(VarCurr)<->v6746(VarCurr)).
% 297.20/295.43  all VarCurr (-v6830(VarCurr)<->v6744(VarCurr)).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6818(VarNext)<->v6819(VarNext))).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6819(VarNext)<->v6820(VarNext)&v6815(VarNext))).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6820(VarNext)<->v6822(VarNext))).
% 297.20/295.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6822(VarNext)<->v6815(VarCurr))).
% 297.20/295.43  v6742(constB0)<->$F.
% 297.20/295.43  all VarCurr (v6815(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.43  all VarCurr (v6748(VarCurr)<->v202(VarCurr,bitIndex48)).
% 297.20/295.43  all VarCurr (v202(VarCurr,bitIndex48)<->v204(VarCurr,bitIndex48)).
% 297.20/295.43  all VarCurr (v204(VarCurr,bitIndex48)<->v546(VarCurr,bitIndex48)).
% 297.20/295.43  all VarCurr (-v6491(VarCurr,bitIndex0)-> (v212(VarCurr,bitIndex48)<->$T)).
% 297.20/295.43  all VarCurr (v6491(VarCurr,bitIndex0)-> (v212(VarCurr,bitIndex48)<->$F)).
% 297.20/295.43  all VarCurr (v6491(VarCurr,bitIndex0)<->v6757(VarCurr)&v6812(VarCurr)).
% 297.20/295.43  all VarCurr (v6812(VarCurr)<->v6759(VarCurr)|v6777(VarCurr)).
% 297.20/295.43  all VarCurr (v6757(VarCurr)<->v6758(VarCurr)|v6776(VarCurr)).
% 297.20/295.43  all VarCurr (-v6776(VarCurr)<->v6777(VarCurr)).
% 297.20/295.43  all VarCurr (v6777(VarCurr)<->v6778(VarCurr)&v6811(VarCurr)).
% 297.20/295.43  all VarCurr (v6811(VarCurr)<->v6780(VarCurr)|v6796(VarCurr)).
% 297.20/295.43  all VarCurr (v6778(VarCurr)<->v6779(VarCurr)|v6795(VarCurr)).
% 297.20/295.43  all VarCurr (-v6795(VarCurr)<->v6796(VarCurr)).
% 297.20/295.43  all VarCurr (v6796(VarCurr)<->v6797(VarCurr)&v6810(VarCurr)).
% 297.20/295.43  all VarCurr (v6810(VarCurr)<->v6762(VarCurr,bitIndex7)|v6800(VarCurr)).
% 297.20/295.43  all VarCurr (v6797(VarCurr)<->v6798(VarCurr)|v6799(VarCurr)).
% 297.20/295.43  all VarCurr (-v6799(VarCurr)<->v6800(VarCurr)).
% 297.20/295.43  all VarCurr (v6800(VarCurr)<->v6801(VarCurr)&v6809(VarCurr)).
% 297.20/295.43  all VarCurr (v6809(VarCurr)<->v6762(VarCurr,bitIndex6)|v6804(VarCurr)).
% 297.20/295.43  all VarCurr (v6801(VarCurr)<->v6802(VarCurr)|v6803(VarCurr)).
% 297.20/295.43  all VarCurr (-v6803(VarCurr)<->v6804(VarCurr)).
% 297.20/295.43  all VarCurr (v6804(VarCurr)<->v6805(VarCurr)&v6808(VarCurr)).
% 297.20/295.43  all VarCurr (v6808(VarCurr)<->v6762(VarCurr,bitIndex4)|v6762(VarCurr,bitIndex5)).
% 297.20/295.43  all VarCurr (v6805(VarCurr)<->v6806(VarCurr)|v6807(VarCurr)).
% 297.20/295.43  all VarCurr (-v6807(VarCurr)<->v6762(VarCurr,bitIndex5)).
% 297.20/295.43  all VarCurr (-v6806(VarCurr)<->v6762(VarCurr,bitIndex4)).
% 297.20/295.43  all VarCurr (-v6802(VarCurr)<->v6762(VarCurr,bitIndex6)).
% 297.20/295.43  all VarCurr (-v6798(VarCurr)<->v6762(VarCurr,bitIndex7)).
% 297.20/295.43  all VarCurr (-v6779(VarCurr)<->v6780(VarCurr)).
% 297.20/295.43  all VarCurr (v6780(VarCurr)<->v6781(VarCurr)&v6794(VarCurr)).
% 297.20/295.43  all VarCurr (v6794(VarCurr)<->v6762(VarCurr,bitIndex3)|v6784(VarCurr)).
% 297.20/295.43  all VarCurr (v6781(VarCurr)<->v6782(VarCurr)|v6783(VarCurr)).
% 297.20/295.43  all VarCurr (-v6783(VarCurr)<->v6784(VarCurr)).
% 297.20/295.43  all VarCurr (v6784(VarCurr)<->v6785(VarCurr)&v6793(VarCurr)).
% 297.20/295.43  all VarCurr (v6793(VarCurr)<->v6762(VarCurr,bitIndex2)|v6788(VarCurr)).
% 297.20/295.43  all VarCurr (v6785(VarCurr)<->v6786(VarCurr)|v6787(VarCurr)).
% 297.20/295.43  all VarCurr (-v6787(VarCurr)<->v6788(VarCurr)).
% 297.20/295.43  all VarCurr (v6788(VarCurr)<->v6789(VarCurr)&v6792(VarCurr)).
% 297.20/295.43  all VarCurr (v6792(VarCurr)<->v6762(VarCurr,bitIndex0)|v6762(VarCurr,bitIndex1)).
% 297.20/295.43  all VarCurr (v6789(VarCurr)<->v6790(VarCurr)|v6791(VarCurr)).
% 297.20/295.43  all VarCurr (-v6791(VarCurr)<->v6762(VarCurr,bitIndex1)).
% 297.20/295.43  all VarCurr (-v6790(VarCurr)<->v6762(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (-v6786(VarCurr)<->v6762(VarCurr,bitIndex2)).
% 297.20/295.43  all VarCurr (-v6782(VarCurr)<->v6762(VarCurr,bitIndex3)).
% 297.20/295.43  all VarCurr (-v6758(VarCurr)<->v6759(VarCurr)).
% 297.20/295.43  all VarCurr (v6759(VarCurr)<->v6760(VarCurr)&v6775(VarCurr)).
% 297.20/295.43  all VarCurr (v6775(VarCurr)<->v6762(VarCurr,bitIndex11)|v6765(VarCurr)).
% 297.20/295.43  all VarCurr (v6760(VarCurr)<->v6761(VarCurr)|v6764(VarCurr)).
% 297.20/295.43  all VarCurr (-v6764(VarCurr)<->v6765(VarCurr)).
% 297.20/295.43  all VarCurr (v6765(VarCurr)<->v6766(VarCurr)&v6774(VarCurr)).
% 297.20/295.43  all VarCurr (v6774(VarCurr)<->v6762(VarCurr,bitIndex10)|v6769(VarCurr)).
% 297.20/295.43  all VarCurr (v6766(VarCurr)<->v6767(VarCurr)|v6768(VarCurr)).
% 297.20/295.43  all VarCurr (-v6768(VarCurr)<->v6769(VarCurr)).
% 297.20/295.43  all VarCurr (v6769(VarCurr)<->v6770(VarCurr)&v6773(VarCurr)).
% 297.20/295.43  all VarCurr (v6773(VarCurr)<->v6762(VarCurr,bitIndex8)|v6762(VarCurr,bitIndex9)).
% 297.20/295.43  all VarCurr (v6770(VarCurr)<->v6771(VarCurr)|v6772(VarCurr)).
% 297.20/295.43  all VarCurr (-v6772(VarCurr)<->v6762(VarCurr,bitIndex9)).
% 297.20/295.43  all VarCurr (-v6771(VarCurr)<->v6762(VarCurr,bitIndex8)).
% 297.20/295.43  all VarCurr (-v6767(VarCurr)<->v6762(VarCurr,bitIndex10)).
% 297.20/295.43  all VarCurr (-v6761(VarCurr)<->v6762(VarCurr,bitIndex11)).
% 297.20/295.43  all VarCurr (v6762(VarCurr,bitIndex0)<->v6763(VarCurr)).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v6762(VarCurr,B)<->v212(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_11_6(B)-> (v6762(VarCurr,B)<->v212(VarCurr,B))).
% 297.20/295.43  all VarCurr (v6763(VarCurr)<-> -(v212(VarCurr,bitIndex0)<->v6750(VarCurr))).
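%
% Unwinding v6757 ... v6812 above: each conjunction of an (a|b) formula
% with a matching (-a|-b) formula is one XOR node, so v6491[0] is the
% parity of the 12-bit vector v6762, reduced as a tree over bits 0-3,
% 4-7 and 8-11. Combined with the guarded pair at the head of this group,
% v212[48] then appears to be an even-parity check of that window. A
% Python sketch of the reduction (editorial interpretation; names
% illustrative):
%
%   from functools import reduce
%   from operator import xor
%
%   def v6491_bit0(v6762_bits):
%       # parity of v6762[0..11]; per the v6763 formula above, bit 0
%       # is itself v212[0] XOR v6750, the rest come from v212[1..11].
%       assert len(v6762_bits) == 12
%       return reduce(xor, v6762_bits, False)
%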
% 297.20/295.43  all VarCurr (v6750(VarCurr)<->v6752(VarCurr)).
% 297.20/295.43  all VarCurr (v6752(VarCurr)<->v6754(VarCurr)).
% 297.20/295.43  all VarCurr (v212(VarCurr,bitIndex0)<->v545(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (v535(VarCurr,bitIndex0)<->v537(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (v537(VarCurr,bitIndex0)<->v539(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (v539(VarCurr,bitIndex0)<->v541(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (v541(VarCurr,bitIndex0)<->v543(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (v294(VarCurr,bitIndex0)<->v296(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (v296(VarCurr,bitIndex0)<->v298(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (v298(VarCurr,bitIndex0)<->v300(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr (v300(VarCurr,bitIndex0)<->v523(VarCurr,bitIndex0)).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v212(VarCurr,B)<->v545(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v535(VarCurr,B)<->v537(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v537(VarCurr,B)<->v539(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v539(VarCurr,B)<->v541(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_5_3(B)-> (v541(VarCurr,B)<->v543(VarCurr,B))).
% 297.20/295.43  all B (range_5_3(B)<->bitIndex3=B|bitIndex4=B|bitIndex5=B).
% 297.20/295.43  all VarCurr (v541(VarCurr,bitIndex2)<->v543(VarCurr,bitIndex2)).
% 297.20/295.43  all VarCurr (v541(VarCurr,bitIndex1)<->v543(VarCurr,bitIndex1)).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v294(VarCurr,B)<->v296(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v296(VarCurr,B)<->v298(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v298(VarCurr,B)<->v300(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_5_1(B)-> (v300(VarCurr,B)<->v523(VarCurr,B))).
% 297.20/295.43  all B (range_5_1(B)<->bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B).
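%
% range_5_1 and its siblings are finite index sets; every quantified
% formula "range_m_n(B) -> (u(VarCurr,B) <-> w(VarCurr,B))" in this
% section just equates two buses on that window. A one-line model
% (names illustrative):
%
%   def bus_equal_on(window, u, w):
%       # e.g. window = range(1, 6) for range_5_1
%       return all(u[b] == w[b] for b in window)
%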
% 297.20/295.43  all VarCurr B (range_11_6(B)-> (v212(VarCurr,B)<->v545(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_11_6(B)-> (v535(VarCurr,B)<->v537(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_11_6(B)-> (v537(VarCurr,B)<->v539(VarCurr,B))).
% 297.20/295.43  all VarCurr B (range_11_6(B)-> (v539(VarCurr,B)<->v541(VarCurr,B))).
% 297.20/295.43  all VarCurr ((v541(VarCurr,bitIndex11)<->v543(VarCurr,bitIndex18))& (v541(VarCurr,bitIndex10)<->v543(VarCurr,bitIndex17))& (v541(VarCurr,bitIndex9)<->v543(VarCurr,bitIndex16))& (v541(VarCurr,bitIndex8)<->v543(VarCurr,bitIndex15))& (v541(VarCurr,bitIndex7)<->v543(VarCurr,bitIndex14))& (v541(VarCurr,bitIndex6)<->v543(VarCurr,bitIndex13))).
% 297.20/295.44  all VarCurr B (range_11_6(B)-> (v294(VarCurr,B)<->v296(VarCurr,B))).
% 297.20/295.44  all VarCurr B (range_11_6(B)-> (v296(VarCurr,B)<->v298(VarCurr,B))).
% 297.20/295.44  all VarCurr B (range_11_6(B)-> (v298(VarCurr,B)<->v300(VarCurr,B))).
% 297.20/295.44  all VarCurr B (range_11_6(B)-> (v300(VarCurr,B)<->v523(VarCurr,B))).
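%
% The bitwise v541/v543 equivalences here (bits 11..6 of v541 matched to
% bits 18..13 of v543, with offsets of 0, 7 or 16 in the analogous blocks
% later in this section) read as constant-offset field extraction from the
% wider bus v543. Sketch (names illustrative):
%
%   def extract_field(src, src_lo, dst_lo, width):
%       # returns the slice that lands at dst[dst_lo..dst_lo+width-1];
%       # the block above is extract_field(v543, 13, 6, 6).
%       return [src[src_lo + k] for k in range(width)]
%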
% 297.20/295.44  all VarCurr (v6746(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.20/295.44  all VarCurr (v6744(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.20/295.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6725(VarNext)-> (v6657(VarNext)<->v6657(VarCurr)))).
% 297.20/295.44  all VarNext (v6725(VarNext)-> (v6657(VarNext)<->v6735(VarNext))).
% 297.20/295.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6735(VarNext)<->v6733(VarCurr))).
% 297.20/295.44  all VarCurr (-v6736(VarCurr)-> (v6733(VarCurr)<->x552(VarCurr))).
% 297.20/295.44  all VarCurr (v6736(VarCurr)-> (v6733(VarCurr)<->v6663(VarCurr))).
% 297.20/295.44  all VarCurr (v6736(VarCurr)<->v6737(VarCurr)&v6738(VarCurr)).
% 297.20/295.44  all VarCurr (-v6738(VarCurr)<->v6661(VarCurr)).
% 297.20/295.44  all VarCurr (-v6737(VarCurr)<->v6659(VarCurr)).
% 297.20/295.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6725(VarNext)<->v6726(VarNext))).
% 297.20/295.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6726(VarNext)<->v6727(VarNext)&v6722(VarNext))).
% 297.20/295.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6727(VarNext)<->v6729(VarNext))).
% 297.20/295.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6729(VarNext)<->v6722(VarCurr))).
% 297.20/295.44  v6657(constB0)<->$F.
% 297.20/295.44  all VarCurr (v6722(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.20/295.44  all VarCurr (v6663(VarCurr)<->v202(VarCurr,bitIndex49)).
% 297.20/295.44  all VarCurr (v202(VarCurr,bitIndex49)<->v204(VarCurr,bitIndex49)).
% 297.20/295.44  all VarCurr (v204(VarCurr,bitIndex49)<->v546(VarCurr,bitIndex49)).
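%
% Taken together, the v6725/v6735 group above defines one bit of state:
% on a rising edge of v103[1] the register v6657 samples v6733, which is
% the data bit v202[49] when neither v6659 nor v6661 (wired further below
% to v85[1] and v184[1]) is asserted, and the unconstrained input x552
% otherwise; between edges it holds, and it starts at $F. A one-step
% Python sketch under that reading (x552 is passed in as an arbitrary
% value; names illustrative):
%
%   def step_register(q, clk_prev, clk_now, sel, data, x552):
%       # sel = not v85[1] and not v184[1], sampled at the current
%       # state; clk_prev/clk_now give the edge strobe v6725.
%       if clk_now and not clk_prev:
%           return data if sel else x552
%       return q
%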
% 297.20/295.44  all VarCurr (-v6491(VarCurr,bitIndex1)-> (v212(VarCurr,bitIndex49)<->$T)).
% 297.20/295.44  all VarCurr (v6491(VarCurr,bitIndex1)-> (v212(VarCurr,bitIndex49)<->$F)).
% 297.20/295.44  all VarCurr (v6491(VarCurr,bitIndex1)<->v6666(VarCurr)&v6719(VarCurr)).
% 297.20/295.44  all VarCurr (v6719(VarCurr)<->v6668(VarCurr)|v6684(VarCurr)).
% 297.20/295.44  all VarCurr (v6666(VarCurr)<->v6667(VarCurr)|v6683(VarCurr)).
% 297.20/295.44  all VarCurr (-v6683(VarCurr)<->v6684(VarCurr)).
% 297.20/295.44  all VarCurr (v6684(VarCurr)<->v6685(VarCurr)&v6718(VarCurr)).
% 297.20/295.44  all VarCurr (v6718(VarCurr)<->v6687(VarCurr)|v6703(VarCurr)).
% 297.20/295.44  all VarCurr (v6685(VarCurr)<->v6686(VarCurr)|v6702(VarCurr)).
% 297.20/295.44  all VarCurr (-v6702(VarCurr)<->v6703(VarCurr)).
% 297.20/295.44  all VarCurr (v6703(VarCurr)<->v6704(VarCurr)&v6717(VarCurr)).
% 297.20/295.44  all VarCurr (v6717(VarCurr)<->v212(VarCurr,bitIndex19)|v6707(VarCurr)).
% 297.20/295.44  all VarCurr (v6704(VarCurr)<->v6705(VarCurr)|v6706(VarCurr)).
% 297.20/295.44  all VarCurr (-v6706(VarCurr)<->v6707(VarCurr)).
% 297.20/295.44  all VarCurr (v6707(VarCurr)<->v6708(VarCurr)&v6716(VarCurr)).
% 297.20/295.44  all VarCurr (v6716(VarCurr)<->v212(VarCurr,bitIndex18)|v6711(VarCurr)).
% 297.20/295.44  all VarCurr (v6708(VarCurr)<->v6709(VarCurr)|v6710(VarCurr)).
% 297.20/295.44  all VarCurr (-v6710(VarCurr)<->v6711(VarCurr)).
% 297.20/295.44  all VarCurr (v6711(VarCurr)<->v6712(VarCurr)&v6715(VarCurr)).
% 297.20/295.44  all VarCurr (v6715(VarCurr)<->v212(VarCurr,bitIndex16)|v212(VarCurr,bitIndex17)).
% 297.20/295.44  all VarCurr (v6712(VarCurr)<->v6713(VarCurr)|v6714(VarCurr)).
% 297.20/295.44  all VarCurr (-v6714(VarCurr)<->v212(VarCurr,bitIndex17)).
% 297.20/295.44  all VarCurr (-v6713(VarCurr)<->v212(VarCurr,bitIndex16)).
% 297.20/295.44  all VarCurr (-v6709(VarCurr)<->v212(VarCurr,bitIndex18)).
% 297.20/295.44  all VarCurr (-v6705(VarCurr)<->v212(VarCurr,bitIndex19)).
% 297.20/295.44  all VarCurr (-v6686(VarCurr)<->v6687(VarCurr)).
% 297.20/295.44  all VarCurr (v6687(VarCurr)<->v6688(VarCurr)&v6701(VarCurr)).
% 297.20/295.44  all VarCurr (v6701(VarCurr)<->v212(VarCurr,bitIndex15)|v6691(VarCurr)).
% 297.20/295.44  all VarCurr (v6688(VarCurr)<->v6689(VarCurr)|v6690(VarCurr)).
% 297.20/295.44  all VarCurr (-v6690(VarCurr)<->v6691(VarCurr)).
% 297.20/295.44  all VarCurr (v6691(VarCurr)<->v6692(VarCurr)&v6700(VarCurr)).
% 297.20/295.44  all VarCurr (v6700(VarCurr)<->v212(VarCurr,bitIndex14)|v6695(VarCurr)).
% 297.20/295.44  all VarCurr (v6692(VarCurr)<->v6693(VarCurr)|v6694(VarCurr)).
% 297.20/295.44  all VarCurr (-v6694(VarCurr)<->v6695(VarCurr)).
% 297.20/295.44  all VarCurr (v6695(VarCurr)<->v6696(VarCurr)&v6699(VarCurr)).
% 297.20/295.44  all VarCurr (v6699(VarCurr)<->v212(VarCurr,bitIndex12)|v212(VarCurr,bitIndex13)).
% 297.20/295.44  all VarCurr (v6696(VarCurr)<->v6697(VarCurr)|v6698(VarCurr)).
% 297.25/295.45  all VarCurr (-v6698(VarCurr)<->v212(VarCurr,bitIndex13)).
% 297.25/295.45  all VarCurr (-v6697(VarCurr)<->v212(VarCurr,bitIndex12)).
% 297.25/295.45  all VarCurr (-v6693(VarCurr)<->v212(VarCurr,bitIndex14)).
% 297.25/295.45  all VarCurr (-v6689(VarCurr)<->v212(VarCurr,bitIndex15)).
% 297.25/295.45  all VarCurr (-v6667(VarCurr)<->v6668(VarCurr)).
% 297.25/295.45  all VarCurr (v6668(VarCurr)<->v6669(VarCurr)&v6682(VarCurr)).
% 297.25/295.45  all VarCurr (v6682(VarCurr)<->v212(VarCurr,bitIndex23)|v6672(VarCurr)).
% 297.25/295.45  all VarCurr (v6669(VarCurr)<->v6670(VarCurr)|v6671(VarCurr)).
% 297.25/295.45  all VarCurr (-v6671(VarCurr)<->v6672(VarCurr)).
% 297.25/295.45  all VarCurr (v6672(VarCurr)<->v6673(VarCurr)&v6681(VarCurr)).
% 297.25/295.45  all VarCurr (v6681(VarCurr)<->v212(VarCurr,bitIndex22)|v6676(VarCurr)).
% 297.25/295.45  all VarCurr (v6673(VarCurr)<->v6674(VarCurr)|v6675(VarCurr)).
% 297.25/295.45  all VarCurr (-v6675(VarCurr)<->v6676(VarCurr)).
% 297.25/295.45  all VarCurr (v6676(VarCurr)<->v6677(VarCurr)&v6680(VarCurr)).
% 297.25/295.45  all VarCurr (v6680(VarCurr)<->v212(VarCurr,bitIndex20)|v212(VarCurr,bitIndex21)).
% 297.25/295.45  all VarCurr (v6677(VarCurr)<->v6678(VarCurr)|v6679(VarCurr)).
% 297.25/295.45  all VarCurr (-v6679(VarCurr)<->v212(VarCurr,bitIndex21)).
% 297.25/295.45  all VarCurr (-v6678(VarCurr)<->v212(VarCurr,bitIndex20)).
% 297.25/295.45  all VarCurr (-v6674(VarCurr)<->v212(VarCurr,bitIndex22)).
% 297.25/295.45  all VarCurr (-v6670(VarCurr)<->v212(VarCurr,bitIndex23)).
% 297.25/295.45  all VarCurr B (range_23_12(B)-> (v212(VarCurr,B)<->v545(VarCurr,B))).
% 297.25/295.45  all VarCurr B (range_23_12(B)-> (v535(VarCurr,B)<->v537(VarCurr,B))).
% 297.25/295.45  all VarCurr B (range_23_12(B)-> (v537(VarCurr,B)<->v539(VarCurr,B))).
% 297.25/295.45  all VarCurr B (range_23_12(B)-> (v539(VarCurr,B)<->v541(VarCurr,B))).
% 297.25/295.45  all VarCurr ((v541(VarCurr,bitIndex23)<->v543(VarCurr,bitIndex30))& (v541(VarCurr,bitIndex22)<->v543(VarCurr,bitIndex29))& (v541(VarCurr,bitIndex21)<->v543(VarCurr,bitIndex28))& (v541(VarCurr,bitIndex20)<->v543(VarCurr,bitIndex27))& (v541(VarCurr,bitIndex19)<->v543(VarCurr,bitIndex26))& (v541(VarCurr,bitIndex18)<->v543(VarCurr,bitIndex25))& (v541(VarCurr,bitIndex17)<->v543(VarCurr,bitIndex24))& (v541(VarCurr,bitIndex16)<->v543(VarCurr,bitIndex23))& (v541(VarCurr,bitIndex15)<->v543(VarCurr,bitIndex22))& (v541(VarCurr,bitIndex14)<->v543(VarCurr,bitIndex21))& (v541(VarCurr,bitIndex13)<->v543(VarCurr,bitIndex20))& (v541(VarCurr,bitIndex12)<->v543(VarCurr,bitIndex19))).
% 297.25/295.45  all VarCurr B (range_23_12(B)-> (v294(VarCurr,B)<->v296(VarCurr,B))).
% 297.25/295.45  all VarCurr B (range_23_12(B)-> (v296(VarCurr,B)<->v298(VarCurr,B))).
% 297.25/295.45  all VarCurr B (range_23_12(B)-> (v298(VarCurr,B)<->v300(VarCurr,B))).
% 297.25/295.45  all VarCurr B (range_23_12(B)-> (v300(VarCurr,B)<->v523(VarCurr,B))).
% 297.25/295.45  all B (range_23_12(B)<->bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B).
% 297.25/295.45  all VarCurr (v6661(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.45  all VarCurr (v6659(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6640(VarNext)-> (v6571(VarNext)<->v6571(VarCurr)))).
% 297.25/295.45  all VarNext (v6640(VarNext)-> (v6571(VarNext)<->v6650(VarNext))).
% 297.25/295.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6650(VarNext)<->v6648(VarCurr))).
% 297.25/295.45  all VarCurr (-v6651(VarCurr)-> (v6648(VarCurr)<->x552(VarCurr))).
% 297.25/295.45  all VarCurr (v6651(VarCurr)-> (v6648(VarCurr)<->v6577(VarCurr))).
% 297.25/295.45  all VarCurr (v6651(VarCurr)<->v6652(VarCurr)&v6653(VarCurr)).
% 297.25/295.45  all VarCurr (-v6653(VarCurr)<->v6575(VarCurr)).
% 297.25/295.45  all VarCurr (-v6652(VarCurr)<->v6573(VarCurr)).
% 297.25/295.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6640(VarNext)<->v6641(VarNext))).
% 297.25/295.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6641(VarNext)<->v6642(VarNext)&v6637(VarNext))).
% 297.25/295.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6642(VarNext)<->v6644(VarNext))).
% 297.25/295.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6644(VarNext)<->v6637(VarCurr))).
% 297.25/295.45  v6571(constB0)<->$F.
% 297.25/295.45  all VarCurr (v6637(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.45  all VarCurr (v6577(VarCurr)<->v202(VarCurr,bitIndex50)).
% 297.25/295.45  all VarCurr (v202(VarCurr,bitIndex50)<->v204(VarCurr,bitIndex50)).
% 297.25/295.45  all VarCurr (v204(VarCurr,bitIndex50)<->v546(VarCurr,bitIndex50)).
% 297.25/295.45  all VarCurr (-v6491(VarCurr,bitIndex2)-> (v212(VarCurr,bitIndex50)<->$T)).
% 297.25/295.45  all VarCurr (v6491(VarCurr,bitIndex2)-> (v212(VarCurr,bitIndex50)<->$F)).
% 297.25/295.45  all VarCurr (v6491(VarCurr,bitIndex2)<->v6580(VarCurr)&v6634(VarCurr)).
% 297.25/295.46  all VarCurr (v6634(VarCurr)<->v6582(VarCurr)|v6599(VarCurr)).
% 297.25/295.46  all VarCurr (v6580(VarCurr)<->v6581(VarCurr)|v6598(VarCurr)).
% 297.25/295.46  all VarCurr (-v6598(VarCurr)<->v6599(VarCurr)).
% 297.25/295.46  all VarCurr (v6599(VarCurr)<->v6600(VarCurr)&v6633(VarCurr)).
% 297.25/295.46  all VarCurr (v6633(VarCurr)<->v6602(VarCurr)|v6618(VarCurr)).
% 297.25/295.46  all VarCurr (v6600(VarCurr)<->v6601(VarCurr)|v6617(VarCurr)).
% 297.25/295.46  all VarCurr (-v6617(VarCurr)<->v6618(VarCurr)).
% 297.25/295.46  all VarCurr (v6618(VarCurr)<->v6619(VarCurr)&v6632(VarCurr)).
% 297.25/295.46  all VarCurr (v6632(VarCurr)<->v6585(VarCurr,bitIndex7)|v6622(VarCurr)).
% 297.25/295.46  all VarCurr (v6619(VarCurr)<->v6620(VarCurr)|v6621(VarCurr)).
% 297.25/295.46  all VarCurr (-v6621(VarCurr)<->v6622(VarCurr)).
% 297.25/295.46  all VarCurr (v6622(VarCurr)<->v6623(VarCurr)&v6631(VarCurr)).
% 297.25/295.46  all VarCurr (v6631(VarCurr)<->v6585(VarCurr,bitIndex6)|v6626(VarCurr)).
% 297.25/295.46  all VarCurr (v6623(VarCurr)<->v6624(VarCurr)|v6625(VarCurr)).
% 297.25/295.46  all VarCurr (-v6625(VarCurr)<->v6626(VarCurr)).
% 297.25/295.46  all VarCurr (v6626(VarCurr)<->v6627(VarCurr)&v6630(VarCurr)).
% 297.25/295.46  all VarCurr (v6630(VarCurr)<->v6585(VarCurr,bitIndex4)|v6585(VarCurr,bitIndex5)).
% 297.25/295.46  all VarCurr (v6627(VarCurr)<->v6628(VarCurr)|v6629(VarCurr)).
% 297.25/295.46  all VarCurr (-v6629(VarCurr)<->v6585(VarCurr,bitIndex5)).
% 297.25/295.46  all VarCurr (-v6628(VarCurr)<->v6585(VarCurr,bitIndex4)).
% 297.25/295.46  all VarCurr (-v6624(VarCurr)<->v6585(VarCurr,bitIndex6)).
% 297.25/295.46  all VarCurr (-v6620(VarCurr)<->v6585(VarCurr,bitIndex7)).
% 297.25/295.46  all VarCurr (-v6601(VarCurr)<->v6602(VarCurr)).
% 297.25/295.46  all VarCurr (v6602(VarCurr)<->v6603(VarCurr)&v6616(VarCurr)).
% 297.25/295.46  all VarCurr (v6616(VarCurr)<->v6585(VarCurr,bitIndex3)|v6606(VarCurr)).
% 297.25/295.46  all VarCurr (v6603(VarCurr)<->v6604(VarCurr)|v6605(VarCurr)).
% 297.25/295.46  all VarCurr (-v6605(VarCurr)<->v6606(VarCurr)).
% 297.25/295.46  all VarCurr (v6606(VarCurr)<->v6607(VarCurr)&v6615(VarCurr)).
% 297.25/295.46  all VarCurr (v6615(VarCurr)<->v6585(VarCurr,bitIndex2)|v6610(VarCurr)).
% 297.25/295.46  all VarCurr (v6607(VarCurr)<->v6608(VarCurr)|v6609(VarCurr)).
% 297.25/295.46  all VarCurr (-v6609(VarCurr)<->v6610(VarCurr)).
% 297.25/295.46  all VarCurr (v6610(VarCurr)<->v6611(VarCurr)&v6614(VarCurr)).
% 297.25/295.46  all VarCurr (v6614(VarCurr)<->v6585(VarCurr,bitIndex0)|v6585(VarCurr,bitIndex1)).
% 297.25/295.46  all VarCurr (v6611(VarCurr)<->v6612(VarCurr)|v6613(VarCurr)).
% 297.25/295.46  all VarCurr (-v6613(VarCurr)<->v6585(VarCurr,bitIndex1)).
% 297.25/295.46  all VarCurr (-v6612(VarCurr)<->v6585(VarCurr,bitIndex0)).
% 297.25/295.46  all VarCurr (-v6608(VarCurr)<->v6585(VarCurr,bitIndex2)).
% 297.25/295.46  all VarCurr (-v6604(VarCurr)<->v6585(VarCurr,bitIndex3)).
% 297.25/295.46  all VarCurr (-v6581(VarCurr)<->v6582(VarCurr)).
% 297.25/295.46  all VarCurr (v6582(VarCurr)<->v6583(VarCurr)&v6597(VarCurr)).
% 297.25/295.46  all VarCurr (v6597(VarCurr)<->v6585(VarCurr,bitIndex11)|v6587(VarCurr)).
% 297.25/295.46  all VarCurr (v6583(VarCurr)<->v6584(VarCurr)|v6586(VarCurr)).
% 297.25/295.46  all VarCurr (-v6586(VarCurr)<->v6587(VarCurr)).
% 297.25/295.46  all VarCurr (v6587(VarCurr)<->v6588(VarCurr)&v6596(VarCurr)).
% 297.25/295.46  all VarCurr (v6596(VarCurr)<->v6585(VarCurr,bitIndex10)|v6591(VarCurr)).
% 297.25/295.46  all VarCurr (v6588(VarCurr)<->v6589(VarCurr)|v6590(VarCurr)).
% 297.25/295.46  all VarCurr (-v6590(VarCurr)<->v6591(VarCurr)).
% 297.25/295.46  all VarCurr (v6591(VarCurr)<->v6592(VarCurr)&v6595(VarCurr)).
% 297.25/295.46  all VarCurr (v6595(VarCurr)<->v6585(VarCurr,bitIndex8)|v6585(VarCurr,bitIndex9)).
% 297.25/295.46  all VarCurr (v6592(VarCurr)<->v6593(VarCurr)|v6594(VarCurr)).
% 297.25/295.46  all VarCurr (-v6594(VarCurr)<->v6585(VarCurr,bitIndex9)).
% 297.25/295.46  all VarCurr (-v6593(VarCurr)<->v6585(VarCurr,bitIndex8)).
% 297.25/295.46  all VarCurr (-v6589(VarCurr)<->v6585(VarCurr,bitIndex10)).
% 297.25/295.46  all VarCurr (-v6584(VarCurr)<->v6585(VarCurr,bitIndex11)).
% 297.25/295.46  all VarCurr ((v6585(VarCurr,bitIndex7)<->v212(VarCurr,bitIndex31))& (v6585(VarCurr,bitIndex6)<->v212(VarCurr,bitIndex30))& (v6585(VarCurr,bitIndex5)<->v212(VarCurr,bitIndex29))& (v6585(VarCurr,bitIndex4)<->v212(VarCurr,bitIndex28))& (v6585(VarCurr,bitIndex3)<->v212(VarCurr,bitIndex27))& (v6585(VarCurr,bitIndex2)<->v212(VarCurr,bitIndex26))& (v6585(VarCurr,bitIndex1)<->v212(VarCurr,bitIndex25))& (v6585(VarCurr,bitIndex0)<->v212(VarCurr,bitIndex24))).
% 297.25/295.46  all VarCurr ((v6585(VarCurr,bitIndex11)<->v212(VarCurr,bitIndex35))& (v6585(VarCurr,bitIndex10)<->v212(VarCurr,bitIndex34))& (v6585(VarCurr,bitIndex9)<->v212(VarCurr,bitIndex33))& (v6585(VarCurr,bitIndex8)<->v212(VarCurr,bitIndex32))).
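%
% The two formulas above only rename a contiguous slice: v6585[11..0] is
% the window v212[35..24] that the v6491[2] parity tree above reduces.
% Sketch (names illustrative):
%
%   def window(bus, lo, width=12):
%       # v6585 = v212[lo .. lo+width-1], here lo = 24
%       return bus[lo:lo + width]
%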
% 297.25/295.46  all VarCurr (v212(VarCurr,bitIndex31)<->v545(VarCurr,bitIndex31)).
% 297.25/295.46  all VarCurr B (range_29_24(B)-> (v212(VarCurr,B)<->v545(VarCurr,B))).
% 297.25/295.47  all VarCurr (v535(VarCurr,bitIndex31)<->v537(VarCurr,bitIndex31)).
% 297.25/295.47  all VarCurr (v537(VarCurr,bitIndex31)<->v539(VarCurr,bitIndex31)).
% 297.25/295.47  all VarCurr (v539(VarCurr,bitIndex31)<->v541(VarCurr,bitIndex31)).
% 297.25/295.47  all VarCurr (v541(VarCurr,bitIndex31)<->v543(VarCurr,bitIndex38)).
% 297.25/295.47  all VarCurr (v294(VarCurr,bitIndex31)<->v296(VarCurr,bitIndex31)).
% 297.25/295.47  all VarCurr (v296(VarCurr,bitIndex31)<->v298(VarCurr,bitIndex31)).
% 297.25/295.47  all VarCurr (v298(VarCurr,bitIndex31)<->v300(VarCurr,bitIndex31)).
% 297.25/295.47  all VarCurr (v300(VarCurr,bitIndex31)<->v523(VarCurr,bitIndex31)).
% 297.25/295.47  all VarCurr B (range_29_24(B)-> (v535(VarCurr,B)<->v537(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_29_24(B)-> (v537(VarCurr,B)<->v539(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_29_24(B)-> (v539(VarCurr,B)<->v541(VarCurr,B))).
% 297.25/295.47  all VarCurr ((v541(VarCurr,bitIndex29)<->v543(VarCurr,bitIndex36))& (v541(VarCurr,bitIndex28)<->v543(VarCurr,bitIndex35))& (v541(VarCurr,bitIndex27)<->v543(VarCurr,bitIndex34))& (v541(VarCurr,bitIndex26)<->v543(VarCurr,bitIndex33))& (v541(VarCurr,bitIndex25)<->v543(VarCurr,bitIndex32))& (v541(VarCurr,bitIndex24)<->v543(VarCurr,bitIndex31))).
% 297.25/295.47  all VarCurr B (range_29_24(B)-> (v294(VarCurr,B)<->v296(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_29_24(B)-> (v296(VarCurr,B)<->v298(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_29_24(B)-> (v298(VarCurr,B)<->v300(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_29_24(B)-> (v300(VarCurr,B)<->v523(VarCurr,B))).
% 297.25/295.47  all B (range_29_24(B)<->bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B).
% 297.25/295.47  all VarCurr B (range_35_32(B)-> (v212(VarCurr,B)<->v545(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_35_32(B)-> (v535(VarCurr,B)<->v537(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_35_32(B)-> (v537(VarCurr,B)<->v539(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_35_32(B)-> (v539(VarCurr,B)<->v541(VarCurr,B))).
% 297.25/295.47  all VarCurr ((v541(VarCurr,bitIndex35)<->v543(VarCurr,bitIndex51))& (v541(VarCurr,bitIndex34)<->v543(VarCurr,bitIndex50))& (v541(VarCurr,bitIndex33)<->v543(VarCurr,bitIndex49))& (v541(VarCurr,bitIndex32)<->v543(VarCurr,bitIndex48))).
% 297.25/295.47  all VarCurr B (range_35_32(B)-> (v294(VarCurr,B)<->v296(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_35_32(B)-> (v296(VarCurr,B)<->v298(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_35_32(B)-> (v298(VarCurr,B)<->v300(VarCurr,B))).
% 297.25/295.47  all VarCurr B (range_35_32(B)-> (v300(VarCurr,B)<->v523(VarCurr,B))).
% 297.25/295.47  all B (range_35_32(B)<->bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B).
% 297.25/295.47  all VarCurr (v6575(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.47  all VarCurr (v6573(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6554(VarNext)-> (v6483(VarNext)<->v6483(VarCurr)))).
% 297.25/295.47  all VarNext (v6554(VarNext)-> (v6483(VarNext)<->v6564(VarNext))).
% 297.25/295.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6564(VarNext)<->v6562(VarCurr))).
% 297.25/295.47  all VarCurr (-v6565(VarCurr)-> (v6562(VarCurr)<->x552(VarCurr))).
% 297.25/295.47  all VarCurr (v6565(VarCurr)-> (v6562(VarCurr)<->v6489(VarCurr))).
% 297.25/295.47  all VarCurr (v6565(VarCurr)<->v6566(VarCurr)&v6567(VarCurr)).
% 297.25/295.47  all VarCurr (-v6567(VarCurr)<->v6487(VarCurr)).
% 297.25/295.47  all VarCurr (-v6566(VarCurr)<->v6485(VarCurr)).
% 297.25/295.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6554(VarNext)<->v6555(VarNext))).
% 297.25/295.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6555(VarNext)<->v6556(VarNext)&v6551(VarNext))).
% 297.25/295.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6556(VarNext)<->v6558(VarNext))).
% 297.25/295.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6558(VarNext)<->v6551(VarCurr))).
% 297.25/295.47  v6483(constB0)<->$F.
% 297.25/295.47  all VarCurr (v6551(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.47  all VarCurr (v6489(VarCurr)<->v202(VarCurr,bitIndex51)).
% 297.25/295.47  all VarCurr (v202(VarCurr,bitIndex51)<->v204(VarCurr,bitIndex51)).
% 297.25/295.47  all VarCurr (v204(VarCurr,bitIndex51)<->v546(VarCurr,bitIndex51)).
% 297.25/295.47  all VarCurr (-v6491(VarCurr,bitIndex3)-> (v212(VarCurr,bitIndex51)<->$T)).
% 297.25/295.47  all VarCurr (v6491(VarCurr,bitIndex3)-> (v212(VarCurr,bitIndex51)<->$F)).
% 297.25/295.47  all VarCurr (v6491(VarCurr,bitIndex3)<->v6494(VarCurr)&v6548(VarCurr)).
% 297.25/295.47  all VarCurr (v6548(VarCurr)<->v6496(VarCurr)|v6513(VarCurr)).
% 297.25/295.47  all VarCurr (v6494(VarCurr)<->v6495(VarCurr)|v6512(VarCurr)).
% 297.25/295.47  all VarCurr (-v6512(VarCurr)<->v6513(VarCurr)).
% 297.25/295.47  all VarCurr (v6513(VarCurr)<->v6514(VarCurr)&v6547(VarCurr)).
% 297.25/295.48  all VarCurr (v6547(VarCurr)<->v6516(VarCurr)|v6532(VarCurr)).
% 297.25/295.48  all VarCurr (v6514(VarCurr)<->v6515(VarCurr)|v6531(VarCurr)).
% 297.25/295.48  all VarCurr (-v6531(VarCurr)<->v6532(VarCurr)).
% 297.25/295.48  all VarCurr (v6532(VarCurr)<->v6533(VarCurr)&v6546(VarCurr)).
% 297.25/295.48  all VarCurr (v6546(VarCurr)<->v6499(VarCurr,bitIndex7)|v6536(VarCurr)).
% 297.25/295.48  all VarCurr (v6533(VarCurr)<->v6534(VarCurr)|v6535(VarCurr)).
% 297.25/295.48  all VarCurr (-v6535(VarCurr)<->v6536(VarCurr)).
% 297.25/295.48  all VarCurr (v6536(VarCurr)<->v6537(VarCurr)&v6545(VarCurr)).
% 297.25/295.48  all VarCurr (v6545(VarCurr)<->v6499(VarCurr,bitIndex6)|v6540(VarCurr)).
% 297.25/295.48  all VarCurr (v6537(VarCurr)<->v6538(VarCurr)|v6539(VarCurr)).
% 297.25/295.48  all VarCurr (-v6539(VarCurr)<->v6540(VarCurr)).
% 297.25/295.48  all VarCurr (v6540(VarCurr)<->v6541(VarCurr)&v6544(VarCurr)).
% 297.25/295.48  all VarCurr (v6544(VarCurr)<->v6499(VarCurr,bitIndex4)|v6499(VarCurr,bitIndex5)).
% 297.25/295.48  all VarCurr (v6541(VarCurr)<->v6542(VarCurr)|v6543(VarCurr)).
% 297.25/295.48  all VarCurr (-v6543(VarCurr)<->v6499(VarCurr,bitIndex5)).
% 297.25/295.48  all VarCurr (-v6542(VarCurr)<->v6499(VarCurr,bitIndex4)).
% 297.25/295.48  all VarCurr (-v6538(VarCurr)<->v6499(VarCurr,bitIndex6)).
% 297.25/295.48  all VarCurr (-v6534(VarCurr)<->v6499(VarCurr,bitIndex7)).
% 297.25/295.48  all VarCurr (-v6515(VarCurr)<->v6516(VarCurr)).
% 297.25/295.48  all VarCurr (v6516(VarCurr)<->v6517(VarCurr)&v6530(VarCurr)).
% 297.25/295.48  all VarCurr (v6530(VarCurr)<->v6499(VarCurr,bitIndex3)|v6520(VarCurr)).
% 297.25/295.48  all VarCurr (v6517(VarCurr)<->v6518(VarCurr)|v6519(VarCurr)).
% 297.25/295.48  all VarCurr (-v6519(VarCurr)<->v6520(VarCurr)).
% 297.25/295.48  all VarCurr (v6520(VarCurr)<->v6521(VarCurr)&v6529(VarCurr)).
% 297.25/295.48  all VarCurr (v6529(VarCurr)<->v6499(VarCurr,bitIndex2)|v6524(VarCurr)).
% 297.25/295.48  all VarCurr (v6521(VarCurr)<->v6522(VarCurr)|v6523(VarCurr)).
% 297.25/295.48  all VarCurr (-v6523(VarCurr)<->v6524(VarCurr)).
% 297.25/295.48  all VarCurr (v6524(VarCurr)<->v6525(VarCurr)&v6528(VarCurr)).
% 297.25/295.48  all VarCurr (v6528(VarCurr)<->v6499(VarCurr,bitIndex0)|v6499(VarCurr,bitIndex1)).
% 297.25/295.48  all VarCurr (v6525(VarCurr)<->v6526(VarCurr)|v6527(VarCurr)).
% 297.25/295.48  all VarCurr (-v6527(VarCurr)<->v6499(VarCurr,bitIndex1)).
% 297.25/295.48  all VarCurr (-v6526(VarCurr)<->v6499(VarCurr,bitIndex0)).
% 297.25/295.48  all VarCurr (-v6522(VarCurr)<->v6499(VarCurr,bitIndex2)).
% 297.25/295.48  all VarCurr (-v6518(VarCurr)<->v6499(VarCurr,bitIndex3)).
% 297.25/295.48  all VarCurr (-v6495(VarCurr)<->v6496(VarCurr)).
% 297.25/295.48  all VarCurr (v6496(VarCurr)<->v6497(VarCurr)&v6511(VarCurr)).
% 297.25/295.48  all VarCurr (v6511(VarCurr)<->v6499(VarCurr,bitIndex11)|v6501(VarCurr)).
% 297.25/295.48  all VarCurr (v6497(VarCurr)<->v6498(VarCurr)|v6500(VarCurr)).
% 297.25/295.48  all VarCurr (-v6500(VarCurr)<->v6501(VarCurr)).
% 297.25/295.48  all VarCurr (v6501(VarCurr)<->v6502(VarCurr)&v6510(VarCurr)).
% 297.25/295.48  all VarCurr (v6510(VarCurr)<->v6499(VarCurr,bitIndex10)|v6505(VarCurr)).
% 297.25/295.48  all VarCurr (v6502(VarCurr)<->v6503(VarCurr)|v6504(VarCurr)).
% 297.25/295.48  all VarCurr (-v6504(VarCurr)<->v6505(VarCurr)).
% 297.25/295.48  all VarCurr (v6505(VarCurr)<->v6506(VarCurr)&v6509(VarCurr)).
% 297.25/295.48  all VarCurr (v6509(VarCurr)<->v6499(VarCurr,bitIndex8)|v6499(VarCurr,bitIndex9)).
% 297.25/295.48  all VarCurr (v6506(VarCurr)<->v6507(VarCurr)|v6508(VarCurr)).
% 297.25/295.48  all VarCurr (-v6508(VarCurr)<->v6499(VarCurr,bitIndex9)).
% 297.25/295.48  all VarCurr (-v6507(VarCurr)<->v6499(VarCurr,bitIndex8)).
% 297.25/295.48  all VarCurr (-v6503(VarCurr)<->v6499(VarCurr,bitIndex10)).
% 297.25/295.48  all VarCurr (-v6498(VarCurr)<->v6499(VarCurr,bitIndex11)).
% 297.25/295.48  all VarCurr ((v6499(VarCurr,bitIndex11)<->v212(VarCurr,bitIndex47))& (v6499(VarCurr,bitIndex10)<->v212(VarCurr,bitIndex46))& (v6499(VarCurr,bitIndex9)<->v212(VarCurr,bitIndex45))& (v6499(VarCurr,bitIndex8)<->v212(VarCurr,bitIndex44))& (v6499(VarCurr,bitIndex7)<->v212(VarCurr,bitIndex43))& (v6499(VarCurr,bitIndex6)<->v212(VarCurr,bitIndex42))& (v6499(VarCurr,bitIndex5)<->v212(VarCurr,bitIndex41))& (v6499(VarCurr,bitIndex4)<->v212(VarCurr,bitIndex40))& (v6499(VarCurr,bitIndex3)<->v212(VarCurr,bitIndex39))& (v6499(VarCurr,bitIndex2)<->v212(VarCurr,bitIndex38))& (v6499(VarCurr,bitIndex1)<->v212(VarCurr,bitIndex37))& (v6499(VarCurr,bitIndex0)<->v212(VarCurr,bitIndex36))).
% 297.25/295.48  all VarCurr B (range_47_36(B)-> (v212(VarCurr,B)<->v545(VarCurr,B))).
% 297.25/295.48  all VarCurr B (range_47_36(B)-> (v535(VarCurr,B)<->v537(VarCurr,B))).
% 297.25/295.48  all VarCurr B (range_47_36(B)-> (v537(VarCurr,B)<->v539(VarCurr,B))).
% 297.25/295.48  all VarCurr B (range_47_36(B)-> (v539(VarCurr,B)<->v541(VarCurr,B))).
% 297.25/295.48  all VarCurr ((v541(VarCurr,bitIndex47)<->v543(VarCurr,bitIndex63))& (v541(VarCurr,bitIndex46)<->v543(VarCurr,bitIndex62))& (v541(VarCurr,bitIndex45)<->v543(VarCurr,bitIndex61))& (v541(VarCurr,bitIndex44)<->v543(VarCurr,bitIndex60))& (v541(VarCurr,bitIndex43)<->v543(VarCurr,bitIndex59))& (v541(VarCurr,bitIndex42)<->v543(VarCurr,bitIndex58))& (v541(VarCurr,bitIndex41)<->v543(VarCurr,bitIndex57))& (v541(VarCurr,bitIndex40)<->v543(VarCurr,bitIndex56))& (v541(VarCurr,bitIndex39)<->v543(VarCurr,bitIndex55))& (v541(VarCurr,bitIndex38)<->v543(VarCurr,bitIndex54))& (v541(VarCurr,bitIndex37)<->v543(VarCurr,bitIndex53))& (v541(VarCurr,bitIndex36)<->v543(VarCurr,bitIndex52))).
% 297.25/295.48  all VarCurr B (range_47_36(B)-> (v294(VarCurr,B)<->v296(VarCurr,B))).
% 297.25/295.48  all VarCurr B (range_47_36(B)-> (v296(VarCurr,B)<->v298(VarCurr,B))).
% 297.25/295.48  all VarCurr B (range_47_36(B)-> (v298(VarCurr,B)<->v300(VarCurr,B))).
% 297.25/295.48  all VarCurr B (range_47_36(B)-> (v300(VarCurr,B)<->v523(VarCurr,B))).
% 297.25/295.48  all B (range_47_36(B)<->bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B).
% 297.25/295.48  all VarCurr (v6487(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.48  all VarCurr (v6485(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6466(VarNext)-> (v6455(VarNext)<->v6455(VarCurr)))).
% 297.25/295.48  all VarNext (v6466(VarNext)-> (v6455(VarNext)<->v6476(VarNext))).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6476(VarNext)<->v6474(VarCurr))).
% 297.25/295.48  all VarCurr (-v6477(VarCurr)-> (v6474(VarCurr)<->x552(VarCurr))).
% 297.25/295.48  all VarCurr (v6477(VarCurr)-> (v6474(VarCurr)<->v6461(VarCurr))).
% 297.25/295.48  all VarCurr (v6477(VarCurr)<->v6478(VarCurr)&v6479(VarCurr)).
% 297.25/295.48  all VarCurr (-v6479(VarCurr)<->v6459(VarCurr)).
% 297.25/295.48  all VarCurr (-v6478(VarCurr)<->v6457(VarCurr)).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6466(VarNext)<->v6467(VarNext))).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6467(VarNext)<->v6468(VarNext)&v6463(VarNext))).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6468(VarNext)<->v6470(VarNext))).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6470(VarNext)<->v6463(VarCurr))).
% 297.25/295.48  v6455(constB0)<->$F.
% 297.25/295.48  all VarCurr (v6463(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.48  all VarCurr (v6461(VarCurr)<->v202(VarCurr,bitIndex52)).
% 297.25/295.48  all VarCurr (v202(VarCurr,bitIndex52)<->v204(VarCurr,bitIndex52)).
% 297.25/295.48  all VarCurr (v204(VarCurr,bitIndex52)<->v546(VarCurr,bitIndex52)).
% 297.25/295.48  all VarCurr (v206(VarCurr,bitIndex4)<->v208(VarCurr,bitIndex4)).
% 297.25/295.48  all VarCurr (v208(VarCurr,bitIndex4)<->v210(VarCurr,bitIndex4)).
% 297.25/295.48  all VarCurr (v6459(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.48  all VarCurr (v6457(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6438(VarNext)-> (v6427(VarNext)<->v6427(VarCurr)))).
% 297.25/295.48  all VarNext (v6438(VarNext)-> (v6427(VarNext)<->v6448(VarNext))).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6448(VarNext)<->v6446(VarCurr))).
% 297.25/295.48  all VarCurr (-v6449(VarCurr)-> (v6446(VarCurr)<->x552(VarCurr))).
% 297.25/295.48  all VarCurr (v6449(VarCurr)-> (v6446(VarCurr)<->v6433(VarCurr))).
% 297.25/295.48  all VarCurr (v6449(VarCurr)<->v6450(VarCurr)&v6451(VarCurr)).
% 297.25/295.48  all VarCurr (-v6451(VarCurr)<->v6431(VarCurr)).
% 297.25/295.48  all VarCurr (-v6450(VarCurr)<->v6429(VarCurr)).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6438(VarNext)<->v6439(VarNext))).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6439(VarNext)<->v6440(VarNext)&v6435(VarNext))).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6440(VarNext)<->v6442(VarNext))).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6442(VarNext)<->v6435(VarCurr))).
% 297.25/295.48  v6427(constB0)<->$F.
% 297.25/295.48  all VarCurr (v6435(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.48  all VarCurr (v6433(VarCurr)<->v202(VarCurr,bitIndex53)).
% 297.25/295.48  all VarCurr (v202(VarCurr,bitIndex53)<->v204(VarCurr,bitIndex53)).
% 297.25/295.48  all VarCurr (v204(VarCurr,bitIndex53)<->v546(VarCurr,bitIndex53)).
% 297.25/295.48  all VarCurr (v206(VarCurr,bitIndex5)<->v208(VarCurr,bitIndex5)).
% 297.25/295.48  all VarCurr (v208(VarCurr,bitIndex5)<->v210(VarCurr,bitIndex5)).
% 297.25/295.48  all VarCurr (v6431(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.48  all VarCurr (v6429(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6410(VarNext)-> (v6399(VarNext)<->v6399(VarCurr)))).
% 297.25/295.49  all VarNext (v6410(VarNext)-> (v6399(VarNext)<->v6420(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6420(VarNext)<->v6418(VarCurr))).
% 297.25/295.49  all VarCurr (-v6421(VarCurr)-> (v6418(VarCurr)<->x552(VarCurr))).
% 297.25/295.49  all VarCurr (v6421(VarCurr)-> (v6418(VarCurr)<->v6405(VarCurr))).
% 297.25/295.49  all VarCurr (v6421(VarCurr)<->v6422(VarCurr)&v6423(VarCurr)).
% 297.25/295.49  all VarCurr (-v6423(VarCurr)<->v6403(VarCurr)).
% 297.25/295.49  all VarCurr (-v6422(VarCurr)<->v6401(VarCurr)).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6410(VarNext)<->v6411(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6411(VarNext)<->v6412(VarNext)&v6407(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6412(VarNext)<->v6414(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6414(VarNext)<->v6407(VarCurr))).
% 297.25/295.49  v6399(constB0)<->$F.
% 297.25/295.49  all VarCurr (v6407(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.49  all VarCurr (v6405(VarCurr)<->v202(VarCurr,bitIndex54)).
% 297.25/295.49  all VarCurr (v202(VarCurr,bitIndex54)<->v204(VarCurr,bitIndex54)).
% 297.25/295.49  all VarCurr (v204(VarCurr,bitIndex54)<->v546(VarCurr,bitIndex54)).
% 297.25/295.49  all VarCurr (v6403(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.49  all VarCurr (v6401(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6382(VarNext)-> (v6371(VarNext)<->v6371(VarCurr)))).
% 297.25/295.49  all VarNext (v6382(VarNext)-> (v6371(VarNext)<->v6392(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6392(VarNext)<->v6390(VarCurr))).
% 297.25/295.49  all VarCurr (-v6393(VarCurr)-> (v6390(VarCurr)<->x552(VarCurr))).
% 297.25/295.49  all VarCurr (v6393(VarCurr)-> (v6390(VarCurr)<->v6377(VarCurr))).
% 297.25/295.49  all VarCurr (v6393(VarCurr)<->v6394(VarCurr)&v6395(VarCurr)).
% 297.25/295.49  all VarCurr (-v6395(VarCurr)<->v6375(VarCurr)).
% 297.25/295.49  all VarCurr (-v6394(VarCurr)<->v6373(VarCurr)).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6382(VarNext)<->v6383(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6383(VarNext)<->v6384(VarNext)&v6379(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6384(VarNext)<->v6386(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6386(VarNext)<->v6379(VarCurr))).
% 297.25/295.49  v6371(constB0)<->$F.
% 297.25/295.49  all VarCurr (v6379(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.49  all VarCurr (v6377(VarCurr)<->v202(VarCurr,bitIndex55)).
% 297.25/295.49  all VarCurr (v202(VarCurr,bitIndex55)<->v204(VarCurr,bitIndex55)).
% 297.25/295.49  all VarCurr (v204(VarCurr,bitIndex55)<->v546(VarCurr,bitIndex55)).
% 297.25/295.49  all VarCurr (v206(VarCurr,bitIndex7)<->v208(VarCurr,bitIndex7)).
% 297.25/295.49  all VarCurr (v208(VarCurr,bitIndex7)<->v210(VarCurr,bitIndex7)).
% 297.25/295.49  all VarCurr (v6375(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.49  all VarCurr (v6373(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6354(VarNext)-> (v6343(VarNext)<->v6343(VarCurr)))).
% 297.25/295.49  all VarNext (v6354(VarNext)-> (v6343(VarNext)<->v6364(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6364(VarNext)<->v6362(VarCurr))).
% 297.25/295.49  all VarCurr (-v6365(VarCurr)-> (v6362(VarCurr)<->x552(VarCurr))).
% 297.25/295.49  all VarCurr (v6365(VarCurr)-> (v6362(VarCurr)<->v6349(VarCurr))).
% 297.25/295.49  all VarCurr (v6365(VarCurr)<->v6366(VarCurr)&v6367(VarCurr)).
% 297.25/295.49  all VarCurr (-v6367(VarCurr)<->v6347(VarCurr)).
% 297.25/295.49  all VarCurr (-v6366(VarCurr)<->v6345(VarCurr)).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6354(VarNext)<->v6355(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6355(VarNext)<->v6356(VarNext)&v6351(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6356(VarNext)<->v6358(VarNext))).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6358(VarNext)<->v6351(VarCurr))).
% 297.25/295.49  v6343(constB0)<->$F.
% 297.25/295.49  all VarCurr (v6351(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.49  all VarCurr (v6349(VarCurr)<->v202(VarCurr,bitIndex56)).
% 297.25/295.49  all VarCurr (v202(VarCurr,bitIndex56)<->v204(VarCurr,bitIndex56)).
% 297.25/295.49  all VarCurr (v204(VarCurr,bitIndex56)<->v546(VarCurr,bitIndex56)).
% 297.25/295.49  all VarCurr (v206(VarCurr,bitIndex0)<->v208(VarCurr,bitIndex0)).
% 297.25/295.49  all VarCurr (v208(VarCurr,bitIndex0)<->v210(VarCurr,bitIndex0)).
% 297.25/295.49  all VarCurr (v6347(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.49  all VarCurr (v6345(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6326(VarNext)-> (v6315(VarNext)<->v6315(VarCurr)))).
% 297.25/295.50  all VarNext (v6326(VarNext)-> (v6315(VarNext)<->v6336(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6336(VarNext)<->v6334(VarCurr))).
% 297.25/295.50  all VarCurr (-v6337(VarCurr)-> (v6334(VarCurr)<->x552(VarCurr))).
% 297.25/295.50  all VarCurr (v6337(VarCurr)-> (v6334(VarCurr)<->v6321(VarCurr))).
% 297.25/295.50  all VarCurr (v6337(VarCurr)<->v6338(VarCurr)&v6339(VarCurr)).
% 297.25/295.50  all VarCurr (-v6339(VarCurr)<->v6319(VarCurr)).
% 297.25/295.50  all VarCurr (-v6338(VarCurr)<->v6317(VarCurr)).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6326(VarNext)<->v6327(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6327(VarNext)<->v6328(VarNext)&v6323(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6328(VarNext)<->v6330(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6330(VarNext)<->v6323(VarCurr))).
% 297.25/295.50  v6315(constB0)<->$F.
% 297.25/295.50  all VarCurr (v6323(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v6321(VarCurr)<->v202(VarCurr,bitIndex57)).
% 297.25/295.50  all VarCurr (v202(VarCurr,bitIndex57)<->v204(VarCurr,bitIndex57)).
% 297.25/295.50  all VarCurr (v204(VarCurr,bitIndex57)<->v546(VarCurr,bitIndex57)).
% 297.25/295.50  all VarCurr (v206(VarCurr,bitIndex1)<->v208(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v208(VarCurr,bitIndex1)<->v210(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v6319(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v6317(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6298(VarNext)-> (v6287(VarNext)<->v6287(VarCurr)))).
% 297.25/295.50  all VarNext (v6298(VarNext)-> (v6287(VarNext)<->v6308(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6308(VarNext)<->v6306(VarCurr))).
% 297.25/295.50  all VarCurr (-v6309(VarCurr)-> (v6306(VarCurr)<->x552(VarCurr))).
% 297.25/295.50  all VarCurr (v6309(VarCurr)-> (v6306(VarCurr)<->v6293(VarCurr))).
% 297.25/295.50  all VarCurr (v6309(VarCurr)<->v6310(VarCurr)&v6311(VarCurr)).
% 297.25/295.50  all VarCurr (-v6311(VarCurr)<->v6291(VarCurr)).
% 297.25/295.50  all VarCurr (-v6310(VarCurr)<->v6289(VarCurr)).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6298(VarNext)<->v6299(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6299(VarNext)<->v6300(VarNext)&v6295(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6300(VarNext)<->v6302(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6302(VarNext)<->v6295(VarCurr))).
% 297.25/295.50  v6287(constB0)<->$F.
% 297.25/295.50  all VarCurr (v6295(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v6293(VarCurr)<->v202(VarCurr,bitIndex58)).
% 297.25/295.50  all VarCurr (v202(VarCurr,bitIndex58)<->v204(VarCurr,bitIndex58)).
% 297.25/295.50  all VarCurr (v204(VarCurr,bitIndex58)<->v546(VarCurr,bitIndex58)).
% 297.25/295.50  all VarCurr (v206(VarCurr,bitIndex2)<->v208(VarCurr,bitIndex2)).
% 297.25/295.50  all VarCurr (v208(VarCurr,bitIndex2)<->v210(VarCurr,bitIndex2)).
% 297.25/295.50  all VarCurr (v6291(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v6289(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6270(VarNext)-> (v6259(VarNext)<->v6259(VarCurr)))).
% 297.25/295.50  all VarNext (v6270(VarNext)-> (v6259(VarNext)<->v6280(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6280(VarNext)<->v6278(VarCurr))).
% 297.25/295.50  all VarCurr (-v6281(VarCurr)-> (v6278(VarCurr)<->x552(VarCurr))).
% 297.25/295.50  all VarCurr (v6281(VarCurr)-> (v6278(VarCurr)<->v6265(VarCurr))).
% 297.25/295.50  all VarCurr (v6281(VarCurr)<->v6282(VarCurr)&v6283(VarCurr)).
% 297.25/295.50  all VarCurr (-v6283(VarCurr)<->v6263(VarCurr)).
% 297.25/295.50  all VarCurr (-v6282(VarCurr)<->v6261(VarCurr)).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6270(VarNext)<->v6271(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6271(VarNext)<->v6272(VarNext)&v6267(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6272(VarNext)<->v6274(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6274(VarNext)<->v6267(VarCurr))).
% 297.25/295.50  v6259(constB0)<->$F.
% 297.25/295.50  all VarCurr (v6267(VarCurr)<->v103(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v6265(VarCurr)<->v202(VarCurr,bitIndex59)).
% 297.25/295.50  all VarCurr (v202(VarCurr,bitIndex59)<->v204(VarCurr,bitIndex59)).
% 297.25/295.50  all VarCurr (v204(VarCurr,bitIndex59)<->v546(VarCurr,bitIndex59)).
% 297.25/295.50  all VarCurr (v206(VarCurr,bitIndex3)<->v208(VarCurr,bitIndex3)).
% 297.25/295.50  all VarCurr (v208(VarCurr,bitIndex3)<->v210(VarCurr,bitIndex3)).
% 297.25/295.50  all VarCurr (v6263(VarCurr)<->v184(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v6261(VarCurr)<->v85(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr B (range_8_0(B)-> (v5939(VarCurr,B)<->v5941(VarCurr,B))).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex8)<->v5943(VarCurr)).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex7)<->v6010(VarCurr)).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex6)<->v6046(VarCurr)).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex5)<->v6082(VarCurr)).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex4)<->v6110(VarCurr)).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex3)<->v6138(VarCurr)).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex2)<->v6166(VarCurr)).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex1)<->v6194(VarCurr)).
% 297.25/295.50  all VarCurr (v5941(VarCurr,bitIndex0)<->v6222(VarCurr)).
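%
% The nine formulas above fan the bus v5941[8..0] out to nine independent
% bit registers (v5943, v6010, ..., v6222), each given below by the same
% edge-triggered group, now clocked by v103[0] and gated by v85[0] and
% v184[0]. A sketch of the bank as one parallel step (editorial
% interpretation; names illustrative):
%
%   def step_bank(q_bits, clk_prev, clk_now, sel, data_bits, x552):
%       strobe = clk_now and not clk_prev
%       return [(d if sel else x552) if strobe else q
%               for q, d in zip(q_bits, data_bits)]
%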
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6233(VarNext)-> (v6222(VarNext)<->v6222(VarCurr)))).
% 297.25/295.50  all VarNext (v6233(VarNext)-> (v6222(VarNext)<->v6243(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6243(VarNext)<->v6241(VarCurr))).
% 297.25/295.50  all VarCurr (-v6244(VarCurr)-> (v6241(VarCurr)<->x552(VarCurr))).
% 297.25/295.50  all VarCurr (v6244(VarCurr)-> (v6241(VarCurr)<->v6228(VarCurr))).
% 297.25/295.50  all VarCurr (v6244(VarCurr)<->v6245(VarCurr)&v6246(VarCurr)).
% 297.25/295.50  all VarCurr (-v6246(VarCurr)<->v6226(VarCurr)).
% 297.25/295.50  all VarCurr (-v6245(VarCurr)<->v6224(VarCurr)).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6233(VarNext)<->v6234(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6234(VarNext)<->v6235(VarNext)&v6230(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6235(VarNext)<->v6237(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6237(VarNext)<->v6230(VarCurr))).
% 297.25/295.50  v6222(constB0)<->$F.
% 297.25/295.50  all VarCurr (v6230(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v6228(VarCurr)<->v5951(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5951(VarCurr,bitIndex0)<->v5953(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5953(VarCurr,bitIndex0)<->v5988(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5955(VarCurr,bitIndex0)<->v5987(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5972(VarCurr,bitIndex0)<->v5974(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5974(VarCurr,bitIndex0)<->v5976(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5976(VarCurr,bitIndex0)<->v5978(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5978(VarCurr,bitIndex0)<->v1210(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5957(VarCurr,bitIndex0)<->v5959(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5959(VarCurr,bitIndex0)<->v5961(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5961(VarCurr,bitIndex0)<->v5963(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v5963(VarCurr,bitIndex0)<->v5970(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v6226(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v6224(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6205(VarNext)-> (v6194(VarNext)<->v6194(VarCurr)))).
% 297.25/295.50  all VarNext (v6205(VarNext)-> (v6194(VarNext)<->v6215(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6215(VarNext)<->v6213(VarCurr))).
% 297.25/295.50  all VarCurr (-v6216(VarCurr)-> (v6213(VarCurr)<->x552(VarCurr))).
% 297.25/295.50  all VarCurr (v6216(VarCurr)-> (v6213(VarCurr)<->v6200(VarCurr))).
% 297.25/295.50  all VarCurr (v6216(VarCurr)<->v6217(VarCurr)&v6218(VarCurr)).
% 297.25/295.50  all VarCurr (-v6218(VarCurr)<->v6198(VarCurr)).
% 297.25/295.50  all VarCurr (-v6217(VarCurr)<->v6196(VarCurr)).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6205(VarNext)<->v6206(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6206(VarNext)<->v6207(VarNext)&v6202(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6207(VarNext)<->v6209(VarNext))).
% 297.25/295.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6209(VarNext)<->v6202(VarCurr))).
% 297.25/295.50  v6194(constB0)<->$F.
% 297.25/295.50  all VarCurr (v6202(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.50  all VarCurr (v6200(VarCurr)<->v5951(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v5951(VarCurr,bitIndex1)<->v5953(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v5953(VarCurr,bitIndex1)<->v5988(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v5955(VarCurr,bitIndex1)<->v5987(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v5972(VarCurr,bitIndex1)<->v5974(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v5974(VarCurr,bitIndex1)<->v5976(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v5976(VarCurr,bitIndex1)<->v5978(VarCurr,bitIndex1)).
% 297.25/295.50  all VarCurr (v5978(VarCurr,bitIndex1)<->v1210(VarCurr,bitIndex1)).
% 297.25/295.51  all VarCurr (v5957(VarCurr,bitIndex1)<->v5959(VarCurr,bitIndex1)).
% 297.25/295.51  all VarCurr (v5959(VarCurr,bitIndex1)<->v5961(VarCurr,bitIndex1)).
% 297.25/295.51  all VarCurr (v5961(VarCurr,bitIndex1)<->v5963(VarCurr,bitIndex1)).
% 297.25/295.51  all VarCurr (v5963(VarCurr,bitIndex1)<->v5970(VarCurr,bitIndex1)).
% 297.25/295.51  all VarCurr (v6198(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.51  all VarCurr (v6196(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6177(VarNext)-> (v6166(VarNext)<->v6166(VarCurr)))).
% 297.25/295.51  all VarNext (v6177(VarNext)-> (v6166(VarNext)<->v6187(VarNext))).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6187(VarNext)<->v6185(VarCurr))).
% 297.25/295.51  all VarCurr (-v6188(VarCurr)-> (v6185(VarCurr)<->x552(VarCurr))).
% 297.25/295.51  all VarCurr (v6188(VarCurr)-> (v6185(VarCurr)<->v6172(VarCurr))).
% 297.25/295.51  all VarCurr (v6188(VarCurr)<->v6189(VarCurr)&v6190(VarCurr)).
% 297.25/295.51  all VarCurr (-v6190(VarCurr)<->v6170(VarCurr)).
% 297.25/295.51  all VarCurr (-v6189(VarCurr)<->v6168(VarCurr)).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6177(VarNext)<->v6178(VarNext))).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6178(VarNext)<->v6179(VarNext)&v6174(VarNext))).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6179(VarNext)<->v6181(VarNext))).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6181(VarNext)<->v6174(VarCurr))).
% 297.25/295.51  v6166(constB0)<->$F.
% 297.25/295.51  all VarCurr (v6174(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.51  all VarCurr (v6172(VarCurr)<->v5951(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5951(VarCurr,bitIndex2)<->v5953(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5953(VarCurr,bitIndex2)<->v5988(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5955(VarCurr,bitIndex2)<->v5987(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5972(VarCurr,bitIndex2)<->v5974(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5974(VarCurr,bitIndex2)<->v5976(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5976(VarCurr,bitIndex2)<->v5978(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5978(VarCurr,bitIndex2)<->v1210(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5957(VarCurr,bitIndex2)<->v5959(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5959(VarCurr,bitIndex2)<->v5961(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5961(VarCurr,bitIndex2)<->v5963(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v5963(VarCurr,bitIndex2)<->v5970(VarCurr,bitIndex2)).
% 297.25/295.51  all VarCurr (v6170(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.51  all VarCurr (v6168(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6149(VarNext)-> (v6138(VarNext)<->v6138(VarCurr)))).
% 297.25/295.51  all VarNext (v6149(VarNext)-> (v6138(VarNext)<->v6159(VarNext))).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6159(VarNext)<->v6157(VarCurr))).
% 297.25/295.51  all VarCurr (-v6160(VarCurr)-> (v6157(VarCurr)<->x552(VarCurr))).
% 297.25/295.51  all VarCurr (v6160(VarCurr)-> (v6157(VarCurr)<->v6144(VarCurr))).
% 297.25/295.51  all VarCurr (v6160(VarCurr)<->v6161(VarCurr)&v6162(VarCurr)).
% 297.25/295.51  all VarCurr (-v6162(VarCurr)<->v6142(VarCurr)).
% 297.25/295.51  all VarCurr (-v6161(VarCurr)<->v6140(VarCurr)).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6149(VarNext)<->v6150(VarNext))).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6150(VarNext)<->v6151(VarNext)&v6146(VarNext))).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6151(VarNext)<->v6153(VarNext))).
% 297.25/295.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6153(VarNext)<->v6146(VarCurr))).
% 297.25/295.51  v6138(constB0)<->$F.
% 297.25/295.51  all VarCurr (v6146(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.51  all VarCurr (v6144(VarCurr)<->v5951(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5951(VarCurr,bitIndex3)<->v5953(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5953(VarCurr,bitIndex3)<->v5988(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5955(VarCurr,bitIndex3)<->v5987(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5972(VarCurr,bitIndex3)<->v5974(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5974(VarCurr,bitIndex3)<->v5976(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5976(VarCurr,bitIndex3)<->v5978(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5978(VarCurr,bitIndex3)<->v1210(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5957(VarCurr,bitIndex3)<->v5959(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5959(VarCurr,bitIndex3)<->v5961(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5961(VarCurr,bitIndex3)<->v5963(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5963(VarCurr,bitIndex3)<->v5970(VarCurr,bitIndex3)).
% 297.25/295.51  all VarCurr (v5965(VarCurr,bitIndex0)<->v5967(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v5967(VarCurr,bitIndex0)<->v5969(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v5969(VarCurr,bitIndex0)<->v986(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6142(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6140(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6121(VarNext)-> (v6110(VarNext)<->v6110(VarCurr)))).
% 297.25/295.52  all VarNext (v6121(VarNext)-> (v6110(VarNext)<->v6131(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6131(VarNext)<->v6129(VarCurr))).
% 297.25/295.52  all VarCurr (-v6132(VarCurr)-> (v6129(VarCurr)<->x552(VarCurr))).
% 297.25/295.52  all VarCurr (v6132(VarCurr)-> (v6129(VarCurr)<->v6116(VarCurr))).
% 297.25/295.52  all VarCurr (v6132(VarCurr)<->v6133(VarCurr)&v6134(VarCurr)).
% 297.25/295.52  all VarCurr (-v6134(VarCurr)<->v6114(VarCurr)).
% 297.25/295.52  all VarCurr (-v6133(VarCurr)<->v6112(VarCurr)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6121(VarNext)<->v6122(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6122(VarNext)<->v6123(VarNext)&v6118(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6123(VarNext)<->v6125(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6125(VarNext)<->v6118(VarCurr))).
% 297.25/295.52  v6110(constB0)<->$F.
% 297.25/295.52  all VarCurr (v6118(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6116(VarCurr)<->v5951(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5951(VarCurr,bitIndex4)<->v5953(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5953(VarCurr,bitIndex4)<->v5988(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5955(VarCurr,bitIndex4)<->v5987(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5972(VarCurr,bitIndex4)<->v5974(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5974(VarCurr,bitIndex4)<->v5976(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5976(VarCurr,bitIndex4)<->v5978(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5978(VarCurr,bitIndex4)<->v1210(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5957(VarCurr,bitIndex4)<->v5959(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5959(VarCurr,bitIndex4)<->v5961(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5961(VarCurr,bitIndex4)<->v5963(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5963(VarCurr,bitIndex4)<->v5970(VarCurr,bitIndex4)).
% 297.25/295.52  all VarCurr (v5965(VarCurr,bitIndex1)<->v5967(VarCurr,bitIndex1)).
% 297.25/295.52  all VarCurr (v5967(VarCurr,bitIndex1)<->v5969(VarCurr,bitIndex1)).
% 297.25/295.52  all VarCurr (v5969(VarCurr,bitIndex1)<->v986(VarCurr,bitIndex1)).
% 297.25/295.52  all VarCurr (v6114(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6112(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6093(VarNext)-> (v6082(VarNext)<->v6082(VarCurr)))).
% 297.25/295.52  all VarNext (v6093(VarNext)-> (v6082(VarNext)<->v6103(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6103(VarNext)<->v6101(VarCurr))).
% 297.25/295.52  all VarCurr (-v6104(VarCurr)-> (v6101(VarCurr)<->x552(VarCurr))).
% 297.25/295.52  all VarCurr (v6104(VarCurr)-> (v6101(VarCurr)<->v6088(VarCurr))).
% 297.25/295.52  all VarCurr (v6104(VarCurr)<->v6105(VarCurr)&v6106(VarCurr)).
% 297.25/295.52  all VarCurr (-v6106(VarCurr)<->v6086(VarCurr)).
% 297.25/295.52  all VarCurr (-v6105(VarCurr)<->v6084(VarCurr)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6093(VarNext)<->v6094(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6094(VarNext)<->v6095(VarNext)&v6090(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6095(VarNext)<->v6097(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6097(VarNext)<->v6090(VarCurr))).
% 297.25/295.52  v6082(constB0)<->$F.
% 297.25/295.52  all VarCurr (v6090(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6088(VarCurr)<->v5951(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5951(VarCurr,bitIndex5)<->v5953(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5953(VarCurr,bitIndex5)<->v5988(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5955(VarCurr,bitIndex5)<->v5987(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5972(VarCurr,bitIndex5)<->v5974(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5974(VarCurr,bitIndex5)<->v5976(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5976(VarCurr,bitIndex5)<->v5978(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5978(VarCurr,bitIndex5)<->v1210(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5957(VarCurr,bitIndex5)<->v5959(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5959(VarCurr,bitIndex5)<->v5961(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5961(VarCurr,bitIndex5)<->v5963(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5963(VarCurr,bitIndex5)<->v5970(VarCurr,bitIndex5)).
% 297.25/295.52  all VarCurr (v5965(VarCurr,bitIndex2)<->v5967(VarCurr,bitIndex2)).
% 297.25/295.52  all VarCurr (v5967(VarCurr,bitIndex2)<->v5969(VarCurr,bitIndex2)).
% 297.25/295.52  all VarCurr (v5969(VarCurr,bitIndex2)<->v986(VarCurr,bitIndex2)).
% 297.25/295.52  all VarCurr (v6086(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6084(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6065(VarNext)-> (v6046(VarNext)<->v6046(VarCurr)))).
% 297.25/295.52  all VarNext (v6065(VarNext)-> (v6046(VarNext)<->v6075(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6075(VarNext)<->v6073(VarCurr))).
% 297.25/295.52  all VarCurr (-v6076(VarCurr)-> (v6073(VarCurr)<->x552(VarCurr))).
% 297.25/295.52  all VarCurr (v6076(VarCurr)-> (v6073(VarCurr)<->v6052(VarCurr))).
% 297.25/295.52  all VarCurr (v6076(VarCurr)<->v6077(VarCurr)&v6078(VarCurr)).
% 297.25/295.52  all VarCurr (-v6078(VarCurr)<->v6050(VarCurr)).
% 297.25/295.52  all VarCurr (-v6077(VarCurr)<->v6048(VarCurr)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6065(VarNext)<->v6066(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6066(VarNext)<->v6067(VarNext)&v6062(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6067(VarNext)<->v6069(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6069(VarNext)<->v6062(VarCurr))).
% 297.25/295.52  v6046(constB0)<->$F.
% 297.25/295.52  all VarCurr (v6062(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6052(VarCurr)<->v5951(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5951(VarCurr,bitIndex6)<->v5953(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5953(VarCurr,bitIndex6)<->v5988(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5955(VarCurr,bitIndex6)<->v5987(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5972(VarCurr,bitIndex6)<->v5974(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5974(VarCurr,bitIndex6)<->v5976(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5976(VarCurr,bitIndex6)<->v5978(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5978(VarCurr,bitIndex6)<->v1210(VarCurr,bitIndex6)).
% 297.25/295.52  all VarNext (v1210(VarNext,bitIndex6)<->v6054(VarNext,bitIndex6)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6055(VarNext)-> (all B (range_8_0(B)-> (v6054(VarNext,B)<->v1210(VarCurr,B)))))).
% 297.25/295.52  all VarNext (v6055(VarNext)-> (all B (range_8_0(B)-> (v6054(VarNext,B)<->v2196(VarNext,B))))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6055(VarNext)<->v6056(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6056(VarNext)<->v6058(VarNext)&v712(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6058(VarNext)<->v721(VarNext))).
% 297.25/295.52  all VarCurr (v5957(VarCurr,bitIndex6)<->v5959(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5959(VarCurr,bitIndex6)<->v5961(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5961(VarCurr,bitIndex6)<->v5963(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5963(VarCurr,bitIndex6)<->v5970(VarCurr,bitIndex6)).
% 297.25/295.52  all VarCurr (v5965(VarCurr,bitIndex3)<->v5967(VarCurr,bitIndex3)).
% 297.25/295.52  all VarCurr (v5967(VarCurr,bitIndex3)<->v5969(VarCurr,bitIndex3)).
% 297.25/295.52  all VarCurr (v5969(VarCurr,bitIndex3)<->v986(VarCurr,bitIndex3)).
% 297.25/295.52  all VarCurr (v6050(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6048(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6029(VarNext)-> (v6010(VarNext)<->v6010(VarCurr)))).
% 297.25/295.52  all VarNext (v6029(VarNext)-> (v6010(VarNext)<->v6039(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6039(VarNext)<->v6037(VarCurr))).
% 297.25/295.52  all VarCurr (-v6040(VarCurr)-> (v6037(VarCurr)<->x552(VarCurr))).
% 297.25/295.52  all VarCurr (v6040(VarCurr)-> (v6037(VarCurr)<->v6016(VarCurr))).
% 297.25/295.52  all VarCurr (v6040(VarCurr)<->v6041(VarCurr)&v6042(VarCurr)).
% 297.25/295.52  all VarCurr (-v6042(VarCurr)<->v6014(VarCurr)).
% 297.25/295.52  all VarCurr (-v6041(VarCurr)<->v6012(VarCurr)).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6029(VarNext)<->v6030(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6030(VarNext)<->v6031(VarNext)&v6026(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6031(VarNext)<->v6033(VarNext))).
% 297.25/295.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6033(VarNext)<->v6026(VarCurr))).
% 297.25/295.52  v6010(constB0)<->$F.
% 297.25/295.52  all VarCurr (v6026(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.52  all VarCurr (v6016(VarCurr)<->v5951(VarCurr,bitIndex7)).
% 297.25/295.52  all VarCurr (v5951(VarCurr,bitIndex7)<->v5953(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5953(VarCurr,bitIndex7)<->v5988(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5955(VarCurr,bitIndex7)<->v5987(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5972(VarCurr,bitIndex7)<->v5974(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5974(VarCurr,bitIndex7)<->v5976(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5976(VarCurr,bitIndex7)<->v5978(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5978(VarCurr,bitIndex7)<->v1210(VarCurr,bitIndex7)).
% 297.25/295.53  all VarNext (v1210(VarNext,bitIndex7)<->v6018(VarNext,bitIndex7)).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6019(VarNext)-> (all B (range_8_0(B)-> (v6018(VarNext,B)<->v1210(VarCurr,B)))))).
% 297.25/295.53  all VarNext (v6019(VarNext)-> (all B (range_8_0(B)-> (v6018(VarNext,B)<->v2196(VarNext,B))))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6019(VarNext)<->v6020(VarNext))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6020(VarNext)<->v6022(VarNext)&v712(VarNext))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v6022(VarNext)<->v721(VarNext))).
% 297.25/295.53  all VarCurr (v5957(VarCurr,bitIndex7)<->v5959(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5959(VarCurr,bitIndex7)<->v5961(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5961(VarCurr,bitIndex7)<->v5963(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5963(VarCurr,bitIndex7)<->v5970(VarCurr,bitIndex7)).
% 297.25/295.53  all VarCurr (v5965(VarCurr,bitIndex4)<->v5967(VarCurr,bitIndex4)).
% 297.25/295.53  all VarCurr (v5967(VarCurr,bitIndex4)<->v5969(VarCurr,bitIndex4)).
% 297.25/295.53  all VarCurr (v5969(VarCurr,bitIndex4)<->v986(VarCurr,bitIndex4)).
% 297.25/295.53  all VarCurr (v6014(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.53  all VarCurr (v6012(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5993(VarNext)-> (v5943(VarNext)<->v5943(VarCurr)))).
% 297.25/295.53  all VarNext (v5993(VarNext)-> (v5943(VarNext)<->v6003(VarNext))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v6003(VarNext)<->v6001(VarCurr))).
% 297.25/295.53  all VarCurr (-v6004(VarCurr)-> (v6001(VarCurr)<->x552(VarCurr))).
% 297.25/295.53  all VarCurr (v6004(VarCurr)-> (v6001(VarCurr)<->v5949(VarCurr))).
% 297.25/295.53  all VarCurr (v6004(VarCurr)<->v6005(VarCurr)&v6006(VarCurr)).
% 297.25/295.53  all VarCurr (-v6006(VarCurr)<->v5947(VarCurr)).
% 297.25/295.53  all VarCurr (-v6005(VarCurr)<->v5945(VarCurr)).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5993(VarNext)<->v5994(VarNext))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5994(VarNext)<->v5995(VarNext)&v5990(VarNext))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5995(VarNext)<->v5997(VarNext))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5997(VarNext)<->v5990(VarCurr))).
% 297.25/295.53  v5943(constB0)<->$F.
% 297.25/295.53  all VarCurr (v5990(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.25/295.53  all VarCurr (v5949(VarCurr)<->v5951(VarCurr,bitIndex8)).
% 297.25/295.53  all VarCurr (v5951(VarCurr,bitIndex8)<->v5953(VarCurr,bitIndex8)).
% 297.25/295.53  all VarCurr (v5953(VarCurr,bitIndex8)<->v5988(VarCurr,bitIndex8)).
% 297.25/295.53  all VarCurr (-v154(VarCurr)-> (all B (range_8_0(B)-> (v5988(VarCurr,B)<->v5955(VarCurr,B))))).
% 297.25/295.53  all VarCurr (v154(VarCurr)-> (all B (range_8_0(B)-> (v5988(VarCurr,B)<->v775(VarCurr,B))))).
% 297.25/295.53  all VarCurr (v5955(VarCurr,bitIndex8)<->v5987(VarCurr,bitIndex8)).
% 297.25/295.53  all VarCurr (-v214(VarCurr)-> (all B (range_8_0(B)-> (v5987(VarCurr,B)<->v5972(VarCurr,B))))).
% 297.25/295.53  all VarCurr (v214(VarCurr)-> (all B (range_8_0(B)-> (v5987(VarCurr,B)<->v5957(VarCurr,B))))).
% 297.25/295.53  all VarCurr (v5972(VarCurr,bitIndex8)<->v5974(VarCurr,bitIndex8)).
% 297.25/295.53  all VarCurr (v5974(VarCurr,bitIndex8)<->v5976(VarCurr,bitIndex8)).
% 297.25/295.53  all VarCurr (v5976(VarCurr,bitIndex8)<->v5978(VarCurr,bitIndex8)).
% 297.25/295.53  all VarCurr (v5978(VarCurr,bitIndex8)<->v1210(VarCurr,bitIndex8)).
% 297.25/295.53  all VarNext (v1210(VarNext,bitIndex8)<->v5980(VarNext,bitIndex8)).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5981(VarNext)-> (all B (range_8_0(B)-> (v5980(VarNext,B)<->v1210(VarCurr,B)))))).
% 297.25/295.53  all VarNext (v5981(VarNext)-> (all B (range_8_0(B)-> (v5980(VarNext,B)<->v2196(VarNext,B))))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5981(VarNext)<->v5982(VarNext))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5982(VarNext)<->v5984(VarNext)&v712(VarNext))).
% 297.25/295.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5984(VarNext)<->v721(VarNext))).
% 297.25/295.53  all VarCurr (v5957(VarCurr,bitIndex8)<->v5959(VarCurr,bitIndex8)).
% 297.25/295.54  all VarCurr (v5959(VarCurr,bitIndex8)<->v5961(VarCurr,bitIndex8)).
% 297.25/295.54  all VarCurr (v5961(VarCurr,bitIndex8)<->v5963(VarCurr,bitIndex8)).
% 297.25/295.54  all VarCurr (v5963(VarCurr,bitIndex8)<->v5970(VarCurr,bitIndex8)).
% 297.25/295.54  all VarCurr B (range_2_0(B)-> (v5970(VarCurr,B)<->v399(VarCurr,B))).
% 297.25/295.54  all VarCurr ((v5970(VarCurr,bitIndex8)<->v5965(VarCurr,bitIndex5))& (v5970(VarCurr,bitIndex7)<->v5965(VarCurr,bitIndex4))& (v5970(VarCurr,bitIndex6)<->v5965(VarCurr,bitIndex3))& (v5970(VarCurr,bitIndex5)<->v5965(VarCurr,bitIndex2))& (v5970(VarCurr,bitIndex4)<->v5965(VarCurr,bitIndex1))& (v5970(VarCurr,bitIndex3)<->v5965(VarCurr,bitIndex0))).
% 297.25/295.54  all VarCurr (v5965(VarCurr,bitIndex5)<->v5967(VarCurr,bitIndex5)).
% 297.25/295.54  all VarCurr (v5967(VarCurr,bitIndex5)<->v5969(VarCurr,bitIndex5)).
% 297.25/295.54  all VarCurr (v5969(VarCurr,bitIndex5)<->v986(VarCurr,bitIndex5)).
% 297.25/295.54  all VarCurr (v5947(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v5945(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr B (range_8_0(B)-> (v757(VarCurr,B)<->v759(VarCurr,B))).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex8)<->v761(VarCurr)).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex7)<->v4505(VarCurr)).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex6)<->v4552(VarCurr)).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex5)<->v4597(VarCurr)).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex4)<->v4642(VarCurr)).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex3)<->v4687(VarCurr)).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex2)<->v4732(VarCurr)).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex1)<->v5785(VarCurr)).
% 297.25/295.54  all VarCurr (v759(VarCurr,bitIndex0)<->v5862(VarCurr)).
% 297.25/295.54  all VarCurr (v5862(VarCurr)<->v5864(VarCurr)).
% 297.25/295.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5920(VarNext)-> (v5864(VarNext)<->v5864(VarCurr)))).
% 297.25/295.54  all VarNext (v5920(VarNext)-> (v5864(VarNext)<->v5932(VarNext))).
% 297.25/295.54  all VarCurr (-v5921(VarCurr)-> (v5932(VarCurr)<->v5933(VarCurr))).
% 297.25/295.54  all VarCurr (v5921(VarCurr)-> (v5932(VarCurr)<->v5870(VarCurr))).
% 297.25/295.54  all VarCurr (-v5926(VarCurr)-> (v5933(VarCurr)<->v5904(VarCurr))).
% 297.25/295.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5926(VarCurr)-> (v5933(VarCurr)<->x166(VarNext)))).
% 297.25/295.54  all VarCurr (v5920(VarCurr)<->v5921(VarCurr)|v5924(VarCurr)).
% 297.25/295.54  all VarCurr (v5924(VarCurr)<->v5925(VarCurr)&v5931(VarCurr)).
% 297.25/295.54  all VarCurr (-v5931(VarCurr)<->v5921(VarCurr)).
% 297.25/295.54  all VarCurr (v5925(VarCurr)<->v5926(VarCurr)|v5928(VarCurr)).
% 297.25/295.54  all VarCurr (v5928(VarCurr)<->v5929(VarCurr)&v5930(VarCurr)).
% 297.25/295.54  all VarCurr (-v5930(VarCurr)<->v5926(VarCurr)).
% 297.25/295.54  all VarCurr (v5929(VarCurr)<->v5866(VarCurr)&v5868(VarCurr)).
% 297.25/295.54  all VarCurr (v5926(VarCurr)<->v5866(VarCurr)&v5927(VarCurr)).
% 297.25/295.54  all VarCurr (-v5927(VarCurr)<->v5868(VarCurr)).
% 297.25/295.54  all VarCurr (v5921(VarCurr)<->v5922(VarCurr)&v5923(VarCurr)).
% 297.25/295.54  all VarCurr (-v5923(VarCurr)<->v5868(VarCurr)).
% 297.25/295.54  all VarCurr (-v5922(VarCurr)<->v5866(VarCurr)).
% 297.25/295.54  v5864(constB0)<->$F.
% 297.25/295.54  all VarCurr (v5904(VarCurr)<->v4517(VarCurr,bitIndex1)).
% 297.25/295.54  all VarCurr (v4517(VarCurr,bitIndex1)<->v5906(VarCurr)).
% 297.25/295.54  all VarCurr (v5906(VarCurr)<->v5908(VarCurr)).
% 297.25/295.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5913(VarNext)-> (v5908(VarNext)<->v5908(VarCurr)))).
% 297.25/295.54  all VarNext (v5913(VarNext)-> (v5908(VarNext)<->v5787(VarNext))).
% 297.25/295.54  all VarCurr (v5913(VarCurr)<->v5791(VarCurr)&v5914(VarCurr)).
% 297.25/295.54  all VarCurr (-v5914(VarCurr)<->v5910(VarCurr)).
% 297.25/295.54  v5908(constB0)<->$F.
% 297.25/295.54  all VarCurr (v5910(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v5870(VarCurr)<->v771(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v771(VarCurr,bitIndex0)<->v773(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v773(VarCurr,bitIndex0)<->v4466(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v781(VarCurr,bitIndex0)<->v4465(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v4458(VarCurr,bitIndex0)<->v4460(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v4460(VarCurr,bitIndex0)<->v4462(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v4462(VarCurr,bitIndex0)<->v4464(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v4464(VarCurr,bitIndex0)<->v1212(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v783(VarCurr,bitIndex0)<->v785(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v785(VarCurr,bitIndex0)<->v787(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v787(VarCurr,bitIndex0)<->v789(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v789(VarCurr,bitIndex0)<->v4455(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v4456(VarCurr,bitIndex0)<->v4743(VarCurr,bitIndex0)).
% 297.25/295.54  all VarCurr (v4743(VarCurr,bitIndex0)<->v4745(VarCurr,bitIndex0)).
% 297.34/295.55  all VarCurr (v4745(VarCurr,bitIndex0)<->v5746(VarCurr,bitIndex0)).
% 297.34/295.55  all VarCurr (v5669(VarCurr,bitIndex0)<->v5736(VarCurr,bitIndex0)).
% 297.34/295.55  all VarNext (v5015(VarNext,bitIndex26)<->v5896(VarNext,bitIndex26)).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5898(VarNext)-> (all B (range_61_0(B)-> (v5896(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.55  all VarNext (v5898(VarNext)-> (all B (range_61_0(B)-> (v5896(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5898(VarNext)<->v5899(VarNext)&v5650(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5899(VarNext)<->v5901(VarNext)&v5637(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5901(VarNext)<->v5644(VarNext))).
% 297.34/295.55  all VarCurr (v5028(VarCurr,bitIndex26)<->v5030(VarCurr,bitIndex26)).
% 297.34/295.55  all VarCurr (v5030(VarCurr,bitIndex26)<->v5032(VarCurr,bitIndex26)).
% 297.34/295.55  all VarCurr (v5032(VarCurr,bitIndex26)<->v5034(VarCurr,bitIndex26)).
% 297.34/295.55  all VarCurr (v5034(VarCurr,bitIndex26)<->v5632(VarCurr,bitIndex26)).
% 297.34/295.55  all VarCurr (v5045(VarCurr,bitIndex15)<->v5619(VarCurr,bitIndex15)).
% 297.34/295.55  all VarCurr (v5076(VarCurr,bitIndex12)<->v5107(VarCurr,bitIndex12)).
% 297.34/295.55  all VarCurr (v5076(VarCurr,bitIndex15)<->v5107(VarCurr,bitIndex15)).
% 297.34/295.55  all VarNext (v5015(VarNext,bitIndex20)<->v5888(VarNext,bitIndex20)).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5890(VarNext)-> (all B (range_61_0(B)-> (v5888(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.55  all VarNext (v5890(VarNext)-> (all B (range_61_0(B)-> (v5888(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5890(VarNext)<->v5891(VarNext)&v5650(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5891(VarNext)<->v5893(VarNext)&v5637(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5893(VarNext)<->v5644(VarNext))).
% 297.34/295.55  all VarCurr (v5028(VarCurr,bitIndex20)<->v5030(VarCurr,bitIndex20)).
% 297.34/295.55  all VarCurr (v5030(VarCurr,bitIndex20)<->v5032(VarCurr,bitIndex20)).
% 297.34/295.55  all VarCurr (v5032(VarCurr,bitIndex20)<->v5034(VarCurr,bitIndex20)).
% 297.34/295.55  all VarCurr (v5034(VarCurr,bitIndex20)<->v5632(VarCurr,bitIndex20)).
% 297.34/295.55  all VarCurr (v5045(VarCurr,bitIndex9)<->v5619(VarCurr,bitIndex9)).
% 297.34/295.55  all VarCurr (v5076(VarCurr,bitIndex6)<->v5107(VarCurr,bitIndex6)).
% 297.34/295.55  all VarCurr (v5076(VarCurr,bitIndex9)<->v5107(VarCurr,bitIndex9)).
% 297.34/295.55  all VarNext (v5015(VarNext,bitIndex11)<->v5880(VarNext,bitIndex11)).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5882(VarNext)-> (all B (range_61_0(B)-> (v5880(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.55  all VarNext (v5882(VarNext)-> (all B (range_61_0(B)-> (v5880(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5882(VarNext)<->v5883(VarNext)&v5650(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5883(VarNext)<->v5885(VarNext)&v5637(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5885(VarNext)<->v5644(VarNext))).
% 297.34/295.55  all VarCurr (v5028(VarCurr,bitIndex11)<->v5030(VarCurr,bitIndex11)).
% 297.34/295.55  all VarCurr (v5030(VarCurr,bitIndex11)<->v5032(VarCurr,bitIndex11)).
% 297.34/295.55  all VarCurr (v5032(VarCurr,bitIndex11)<->v5034(VarCurr,bitIndex11)).
% 297.34/295.55  all VarCurr (v5034(VarCurr,bitIndex11)<->v5632(VarCurr,bitIndex11)).
% 297.34/295.55  all VarCurr (v5045(VarCurr,bitIndex0)<->v5619(VarCurr,bitIndex0)).
% 297.34/295.55  all VarNext (v5015(VarNext,bitIndex14)<->v5872(VarNext,bitIndex14)).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5874(VarNext)-> (all B (range_61_0(B)-> (v5872(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.55  all VarNext (v5874(VarNext)-> (all B (range_61_0(B)-> (v5872(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5874(VarNext)<->v5875(VarNext)&v5650(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5875(VarNext)<->v5877(VarNext)&v5637(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5877(VarNext)<->v5644(VarNext))).
% 297.34/295.55  all VarCurr (v5028(VarCurr,bitIndex14)<->v5030(VarCurr,bitIndex14)).
% 297.34/295.55  all VarCurr (v5030(VarCurr,bitIndex14)<->v5032(VarCurr,bitIndex14)).
% 297.34/295.55  all VarCurr (v5032(VarCurr,bitIndex14)<->v5034(VarCurr,bitIndex14)).
% 297.34/295.55  all VarCurr (v5034(VarCurr,bitIndex14)<->v5632(VarCurr,bitIndex14)).
% 297.34/295.55  all VarCurr (v5045(VarCurr,bitIndex3)<->v5619(VarCurr,bitIndex3)).
% 297.34/295.55  all VarCurr (v5076(VarCurr,bitIndex0)<->v5107(VarCurr,bitIndex0)).
% 297.34/295.55  all VarCurr (v5076(VarCurr,bitIndex3)<->v5107(VarCurr,bitIndex3)).
% 297.34/295.55  all VarCurr (v775(VarCurr,bitIndex0)<->v777(VarCurr,bitIndex0)).
% 297.34/295.55  all VarCurr (v777(VarCurr,bitIndex0)<->v779(VarCurr,bitIndex0)).
% 297.34/295.55  all VarCurr (v5868(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.34/295.55  all VarCurr (v5866(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.34/295.55  all VarCurr (v5785(VarCurr)<->v5787(VarCurr)).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5843(VarNext)-> (v5787(VarNext)<->v5787(VarCurr)))).
% 297.34/295.55  all VarNext (v5843(VarNext)-> (v5787(VarNext)<->v5855(VarNext))).
% 297.34/295.55  all VarCurr (-v5844(VarCurr)-> (v5855(VarCurr)<->v5856(VarCurr))).
% 297.34/295.55  all VarCurr (v5844(VarCurr)-> (v5855(VarCurr)<->v5793(VarCurr))).
% 297.34/295.55  all VarCurr (-v5849(VarCurr)-> (v5856(VarCurr)<->v5827(VarCurr))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5849(VarCurr)-> (v5856(VarCurr)<->x166(VarNext)))).
% 297.34/295.55  all VarCurr (v5843(VarCurr)<->v5844(VarCurr)|v5847(VarCurr)).
% 297.34/295.55  all VarCurr (v5847(VarCurr)<->v5848(VarCurr)&v5854(VarCurr)).
% 297.34/295.55  all VarCurr (-v5854(VarCurr)<->v5844(VarCurr)).
% 297.34/295.55  all VarCurr (v5848(VarCurr)<->v5849(VarCurr)|v5851(VarCurr)).
% 297.34/295.55  all VarCurr (v5851(VarCurr)<->v5852(VarCurr)&v5853(VarCurr)).
% 297.34/295.55  all VarCurr (-v5853(VarCurr)<->v5849(VarCurr)).
% 297.34/295.55  all VarCurr (v5852(VarCurr)<->v5789(VarCurr)&v5791(VarCurr)).
% 297.34/295.55  all VarCurr (v5849(VarCurr)<->v5789(VarCurr)&v5850(VarCurr)).
% 297.34/295.55  all VarCurr (-v5850(VarCurr)<->v5791(VarCurr)).
% 297.34/295.55  all VarCurr (v5844(VarCurr)<->v5845(VarCurr)&v5846(VarCurr)).
% 297.34/295.55  all VarCurr (-v5846(VarCurr)<->v5791(VarCurr)).
% 297.34/295.55  all VarCurr (-v5845(VarCurr)<->v5789(VarCurr)).
% 297.34/295.55  v5787(constB0)<->$F.
% 297.34/295.55  all VarCurr (v5827(VarCurr)<->v4517(VarCurr,bitIndex2)).
% 297.34/295.55  all VarCurr (v4517(VarCurr,bitIndex2)<->v5829(VarCurr)).
% 297.34/295.55  all VarCurr (v5829(VarCurr)<->v5831(VarCurr)).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5836(VarNext)-> (v5831(VarNext)<->v5831(VarCurr)))).
% 297.34/295.55  all VarNext (v5836(VarNext)-> (v5831(VarNext)<->v4734(VarNext))).
% 297.34/295.55  all VarCurr (v5836(VarCurr)<->v4738(VarCurr)&v5837(VarCurr)).
% 297.34/295.55  all VarCurr (-v5837(VarCurr)<->v5833(VarCurr)).
% 297.34/295.55  v5831(constB0)<->$F.
% 297.34/295.55  all VarCurr (v5833(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.34/295.55  all VarCurr (v5793(VarCurr)<->v771(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v771(VarCurr,bitIndex1)<->v773(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v773(VarCurr,bitIndex1)<->v4466(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v781(VarCurr,bitIndex1)<->v4465(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v4458(VarCurr,bitIndex1)<->v4460(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v4460(VarCurr,bitIndex1)<->v4462(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v4462(VarCurr,bitIndex1)<->v4464(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v4464(VarCurr,bitIndex1)<->v1212(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v783(VarCurr,bitIndex1)<->v785(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v785(VarCurr,bitIndex1)<->v787(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v787(VarCurr,bitIndex1)<->v789(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v789(VarCurr,bitIndex1)<->v4455(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v4456(VarCurr,bitIndex1)<->v4743(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v4743(VarCurr,bitIndex1)<->v4745(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v4745(VarCurr,bitIndex1)<->v5746(VarCurr,bitIndex1)).
% 297.34/295.55  all VarCurr (v5669(VarCurr,bitIndex1)<->v5736(VarCurr,bitIndex1)).
% 297.34/295.55  all VarNext (v5015(VarNext,bitIndex27)<->v5819(VarNext,bitIndex27)).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5821(VarNext)-> (all B (range_61_0(B)-> (v5819(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.55  all VarNext (v5821(VarNext)-> (all B (range_61_0(B)-> (v5819(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5821(VarNext)<->v5822(VarNext)&v5650(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5822(VarNext)<->v5824(VarNext)&v5637(VarNext))).
% 297.34/295.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5824(VarNext)<->v5644(VarNext))).
% 297.34/295.55  all VarCurr (v5028(VarCurr,bitIndex27)<->v5030(VarCurr,bitIndex27)).
% 297.34/295.55  all VarCurr (v5030(VarCurr,bitIndex27)<->v5032(VarCurr,bitIndex27)).
% 297.34/295.55  all VarCurr (v5032(VarCurr,bitIndex27)<->v5034(VarCurr,bitIndex27)).
% 297.34/295.55  all VarCurr (v5034(VarCurr,bitIndex27)<->v5632(VarCurr,bitIndex27)).
% 297.34/295.56  all VarCurr (v5045(VarCurr,bitIndex16)<->v5619(VarCurr,bitIndex16)).
% 297.34/295.56  all VarCurr (v5076(VarCurr,bitIndex13)<->v5107(VarCurr,bitIndex13)).
% 297.34/295.56  all VarCurr (v5076(VarCurr,bitIndex16)<->v5107(VarCurr,bitIndex16)).
% 297.34/295.56  all VarNext (v5015(VarNext,bitIndex21)<->v5811(VarNext,bitIndex21)).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5813(VarNext)-> (all B (range_61_0(B)-> (v5811(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.56  all VarNext (v5813(VarNext)-> (all B (range_61_0(B)-> (v5811(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5813(VarNext)<->v5814(VarNext)&v5650(VarNext))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5814(VarNext)<->v5816(VarNext)&v5637(VarNext))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5816(VarNext)<->v5644(VarNext))).
% 297.34/295.56  all VarCurr (v5028(VarCurr,bitIndex21)<->v5030(VarCurr,bitIndex21)).
% 297.34/295.56  all VarCurr (v5030(VarCurr,bitIndex21)<->v5032(VarCurr,bitIndex21)).
% 297.34/295.56  all VarCurr (v5032(VarCurr,bitIndex21)<->v5034(VarCurr,bitIndex21)).
% 297.34/295.56  all VarCurr (v5034(VarCurr,bitIndex21)<->v5632(VarCurr,bitIndex21)).
% 297.34/295.56  all VarCurr (v5045(VarCurr,bitIndex10)<->v5619(VarCurr,bitIndex10)).
% 297.34/295.56  all VarCurr (v5076(VarCurr,bitIndex7)<->v5107(VarCurr,bitIndex7)).
% 297.34/295.56  all VarCurr (v5076(VarCurr,bitIndex10)<->v5107(VarCurr,bitIndex10)).
% 297.34/295.56  all VarNext (v5015(VarNext,bitIndex12)<->v5803(VarNext,bitIndex12)).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5805(VarNext)-> (all B (range_61_0(B)-> (v5803(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.56  all VarNext (v5805(VarNext)-> (all B (range_61_0(B)-> (v5803(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5805(VarNext)<->v5806(VarNext)&v5650(VarNext))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5806(VarNext)<->v5808(VarNext)&v5637(VarNext))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5808(VarNext)<->v5644(VarNext))).
% 297.34/295.56  all VarCurr (v5028(VarCurr,bitIndex12)<->v5030(VarCurr,bitIndex12)).
% 297.34/295.56  all VarCurr (v5030(VarCurr,bitIndex12)<->v5032(VarCurr,bitIndex12)).
% 297.34/295.56  all VarCurr (v5032(VarCurr,bitIndex12)<->v5034(VarCurr,bitIndex12)).
% 297.34/295.56  all VarCurr (v5034(VarCurr,bitIndex12)<->v5632(VarCurr,bitIndex12)).
% 297.34/295.56  all VarCurr (v5045(VarCurr,bitIndex1)<->v5619(VarCurr,bitIndex1)).
% 297.34/295.56  all VarNext (v5015(VarNext,bitIndex15)<->v5795(VarNext,bitIndex15)).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5797(VarNext)-> (all B (range_61_0(B)-> (v5795(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.56  all VarNext (v5797(VarNext)-> (all B (range_61_0(B)-> (v5795(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5797(VarNext)<->v5798(VarNext)&v5650(VarNext))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5798(VarNext)<->v5800(VarNext)&v5637(VarNext))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5800(VarNext)<->v5644(VarNext))).
% 297.34/295.56  all VarCurr (v5028(VarCurr,bitIndex15)<->v5030(VarCurr,bitIndex15)).
% 297.34/295.56  all VarCurr (v5030(VarCurr,bitIndex15)<->v5032(VarCurr,bitIndex15)).
% 297.34/295.56  all VarCurr (v5032(VarCurr,bitIndex15)<->v5034(VarCurr,bitIndex15)).
% 297.34/295.56  all VarCurr (v5034(VarCurr,bitIndex15)<->v5632(VarCurr,bitIndex15)).
% 297.34/295.56  all VarCurr (v5045(VarCurr,bitIndex4)<->v5619(VarCurr,bitIndex4)).
% 297.34/295.56  all VarCurr (v5076(VarCurr,bitIndex1)<->v5107(VarCurr,bitIndex1)).
% 297.34/295.56  all VarCurr (v5076(VarCurr,bitIndex4)<->v5107(VarCurr,bitIndex4)).
% 297.34/295.56  all VarCurr (v775(VarCurr,bitIndex1)<->v777(VarCurr,bitIndex1)).
% 297.34/295.56  all VarCurr (v777(VarCurr,bitIndex1)<->v779(VarCurr,bitIndex1)).
% 297.34/295.56  all VarCurr (v5791(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.34/295.56  all VarCurr (v5789(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.34/295.56  all VarCurr (v4732(VarCurr)<->v4734(VarCurr)).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5766(VarNext)-> (v4734(VarNext)<->v4734(VarCurr)))).
% 297.34/295.56  all VarNext (v5766(VarNext)-> (v4734(VarNext)<->v5778(VarNext))).
% 297.34/295.56  all VarCurr (-v5767(VarCurr)-> (v5778(VarCurr)<->v5779(VarCurr))).
% 297.34/295.56  all VarCurr (v5767(VarCurr)-> (v5778(VarCurr)<->v4740(VarCurr))).
% 297.34/295.56  all VarCurr (-v5772(VarCurr)-> (v5779(VarCurr)<->v5750(VarCurr))).
% 297.34/295.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5772(VarCurr)-> (v5779(VarCurr)<->x166(VarNext)))).
% 297.34/295.56  all VarCurr (v5766(VarCurr)<->v5767(VarCurr)|v5770(VarCurr)).
% 297.34/295.57  all VarCurr (v5770(VarCurr)<->v5771(VarCurr)&v5777(VarCurr)).
% 297.34/295.57  all VarCurr (-v5777(VarCurr)<->v5767(VarCurr)).
% 297.34/295.57  all VarCurr (v5771(VarCurr)<->v5772(VarCurr)|v5774(VarCurr)).
% 297.34/295.57  all VarCurr (v5774(VarCurr)<->v5775(VarCurr)&v5776(VarCurr)).
% 297.34/295.57  all VarCurr (-v5776(VarCurr)<->v5772(VarCurr)).
% 297.34/295.57  all VarCurr (v5775(VarCurr)<->v4736(VarCurr)&v4738(VarCurr)).
% 297.34/295.57  all VarCurr (v5772(VarCurr)<->v4736(VarCurr)&v5773(VarCurr)).
% 297.34/295.57  all VarCurr (-v5773(VarCurr)<->v4738(VarCurr)).
% 297.34/295.57  all VarCurr (v5767(VarCurr)<->v5768(VarCurr)&v5769(VarCurr)).
% 297.34/295.57  all VarCurr (-v5769(VarCurr)<->v4738(VarCurr)).
% 297.34/295.57  all VarCurr (-v5768(VarCurr)<->v4736(VarCurr)).
% 297.34/295.57  v4734(constB0)<->$F.
% 297.34/295.57  all VarCurr (v5750(VarCurr)<->v4517(VarCurr,bitIndex3)).
% 297.34/295.57  all VarCurr (v4517(VarCurr,bitIndex3)<->v5752(VarCurr)).
% 297.34/295.57  all VarCurr (v5752(VarCurr)<->v5754(VarCurr)).
% 297.34/295.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5759(VarNext)-> (v5754(VarNext)<->v5754(VarCurr)))).
% 297.34/295.57  all VarNext (v5759(VarNext)-> (v5754(VarNext)<->v4689(VarNext))).
% 297.34/295.57  all VarCurr (v5759(VarCurr)<->v4693(VarCurr)&v5760(VarCurr)).
% 297.34/295.57  all VarCurr (-v5760(VarCurr)<->v5756(VarCurr)).
% 297.34/295.57  v5754(constB0)<->$F.
% 297.34/295.57  all VarCurr (v5756(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.34/295.57  all VarCurr (v4740(VarCurr)<->v771(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v771(VarCurr,bitIndex2)<->v773(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v773(VarCurr,bitIndex2)<->v4466(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v781(VarCurr,bitIndex2)<->v4465(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v4458(VarCurr,bitIndex2)<->v4460(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v4460(VarCurr,bitIndex2)<->v4462(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v4462(VarCurr,bitIndex2)<->v4464(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v4464(VarCurr,bitIndex2)<->v1212(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v783(VarCurr,bitIndex2)<->v785(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v785(VarCurr,bitIndex2)<->v787(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v787(VarCurr,bitIndex2)<->v789(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v789(VarCurr,bitIndex2)<->v4455(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v4456(VarCurr,bitIndex2)<->v4743(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v4743(VarCurr,bitIndex2)<->v4745(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (v4745(VarCurr,bitIndex2)<->v5746(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (-v5747(VarCurr)-> (all B (range_2_0(B)-> (v5746(VarCurr,B)<->v5669(VarCurr,B))))).
% 297.34/295.57  all VarCurr (v5747(VarCurr)-> (all B (range_2_0(B)-> (v5746(VarCurr,B)<->v5748(VarCurr,B))))).
% 297.34/295.57  all VarCurr (-v4897(VarCurr)-> (v5748(VarCurr,bitIndex2)<->v5015(VarCurr,bitIndex13))& (v5748(VarCurr,bitIndex1)<->v5015(VarCurr,bitIndex12))& (v5748(VarCurr,bitIndex0)<->v5015(VarCurr,bitIndex11))).
% 297.34/295.57  all VarCurr (v4897(VarCurr)-> (v5748(VarCurr,bitIndex2)<->v5015(VarCurr,bitIndex16))& (v5748(VarCurr,bitIndex1)<->v5015(VarCurr,bitIndex15))& (v5748(VarCurr,bitIndex0)<->v5015(VarCurr,bitIndex14))).
% 297.34/295.57  all VarCurr (-v5747(VarCurr)<->v4747(VarCurr)).
% 297.34/295.57  all VarCurr (v5669(VarCurr,bitIndex2)<->v5736(VarCurr,bitIndex2)).
% 297.34/295.57  all VarCurr (-v5737(VarCurr)& -v5738(VarCurr)& -v5739(VarCurr)& -v5740(VarCurr)& -v5741(VarCurr)& -v5742(VarCurr)& -v5743(VarCurr)-> (all B (range_2_0(B)-> (v5736(VarCurr,B)<->$F)))).
% 297.34/295.57  all VarCurr (v5743(VarCurr)-> (all B (range_2_0(B)-> (v5736(VarCurr,B)<->$F)))).
% 297.34/295.57  all VarCurr (v5742(VarCurr)-> (v5736(VarCurr,bitIndex2)<->v5015(VarCurr,bitIndex28))& (v5736(VarCurr,bitIndex1)<->v5015(VarCurr,bitIndex27))& (v5736(VarCurr,bitIndex0)<->v5015(VarCurr,bitIndex26))).
% 297.34/295.57  all VarCurr (v5741(VarCurr)-> (all B (range_2_0(B)-> (v5736(VarCurr,B)<->$F)))).
% 297.34/295.57  all VarCurr (v5740(VarCurr)-> (v5736(VarCurr,bitIndex2)<->v5015(VarCurr,bitIndex22))& (v5736(VarCurr,bitIndex1)<->v5015(VarCurr,bitIndex21))& (v5736(VarCurr,bitIndex0)<->v5015(VarCurr,bitIndex20))).
% 297.34/295.57  all VarCurr (v5739(VarCurr)-> (all B (range_2_0(B)-> (v5736(VarCurr,B)<->$F)))).
% 297.34/295.57  all VarCurr (v5738(VarCurr)-> (v5736(VarCurr,bitIndex2)<->v5015(VarCurr,bitIndex16))& (v5736(VarCurr,bitIndex1)<->v5015(VarCurr,bitIndex15))& (v5736(VarCurr,bitIndex0)<->v5015(VarCurr,bitIndex14))).
% 297.34/295.57  all VarCurr (v5737(VarCurr)-> (v5736(VarCurr,bitIndex2)<->v5015(VarCurr,bitIndex13))& (v5736(VarCurr,bitIndex1)<->v5015(VarCurr,bitIndex12))& (v5736(VarCurr,bitIndex0)<->v5015(VarCurr,bitIndex11))).
% 297.34/295.58  all VarCurr (v5743(VarCurr)<->v5744(VarCurr)|v5745(VarCurr)).
% 297.34/295.58  all VarCurr (v5745(VarCurr)<-> (v5671(VarCurr,bitIndex2)<->$T)& (v5671(VarCurr,bitIndex1)<->$T)& (v5671(VarCurr,bitIndex0)<->$T)).
% 297.34/295.58  all VarCurr (v5744(VarCurr)<-> (v5671(VarCurr,bitIndex2)<->$T)& (v5671(VarCurr,bitIndex1)<->$T)& (v5671(VarCurr,bitIndex0)<->$F)).
% 297.34/295.58  all VarCurr (v5742(VarCurr)<-> (v5671(VarCurr,bitIndex2)<->$T)& (v5671(VarCurr,bitIndex1)<->$F)& (v5671(VarCurr,bitIndex0)<->$T)).
% 297.34/295.58  all VarCurr (v5741(VarCurr)<-> (v5671(VarCurr,bitIndex2)<->$T)& (v5671(VarCurr,bitIndex1)<->$F)& (v5671(VarCurr,bitIndex0)<->$F)).
% 297.34/295.58  all VarCurr (v5740(VarCurr)<-> (v5671(VarCurr,bitIndex2)<->$F)& (v5671(VarCurr,bitIndex1)<->$T)& (v5671(VarCurr,bitIndex0)<->$T)).
% 297.34/295.58  all VarCurr (v5739(VarCurr)<-> (v5671(VarCurr,bitIndex2)<->$F)& (v5671(VarCurr,bitIndex1)<->$T)& (v5671(VarCurr,bitIndex0)<->$F)).
% 297.34/295.58  all VarCurr (v5738(VarCurr)<-> (v5671(VarCurr,bitIndex2)<->$F)& (v5671(VarCurr,bitIndex1)<->$F)& (v5671(VarCurr,bitIndex0)<->$T)).
% 297.34/295.58  all VarCurr (v5737(VarCurr)<-> (v5671(VarCurr,bitIndex2)<->$F)& (v5671(VarCurr,bitIndex1)<->$F)& (v5671(VarCurr,bitIndex0)<->$F)).
% 297.34/295.58  all VarNext (v5015(VarNext,bitIndex28)<->v5729(VarNext,bitIndex28)).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5731(VarNext)-> (all B (range_61_0(B)-> (v5729(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.58  all VarNext (v5731(VarNext)-> (all B (range_61_0(B)-> (v5729(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5731(VarNext)<->v5732(VarNext)&v5650(VarNext))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5732(VarNext)<->v5734(VarNext)&v5637(VarNext))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5734(VarNext)<->v5644(VarNext))).
% 297.34/295.58  all VarCurr (v5028(VarCurr,bitIndex28)<->v5030(VarCurr,bitIndex28)).
% 297.34/295.58  all VarCurr (v5030(VarCurr,bitIndex28)<->v5032(VarCurr,bitIndex28)).
% 297.34/295.58  all VarCurr (v5032(VarCurr,bitIndex28)<->v5034(VarCurr,bitIndex28)).
% 297.34/295.58  all VarCurr (v5034(VarCurr,bitIndex28)<->v5632(VarCurr,bitIndex28)).
% 297.34/295.58  all VarCurr (v5045(VarCurr,bitIndex17)<->v5619(VarCurr,bitIndex17)).
% 297.34/295.58  all VarCurr (v5076(VarCurr,bitIndex14)<->v5107(VarCurr,bitIndex14)).
% 297.34/295.58  all VarCurr (v5076(VarCurr,bitIndex17)<->v5107(VarCurr,bitIndex17)).
% 297.34/295.58  all VarCurr ((v5094(VarCurr,bitIndex17)<->v5049(VarCurr,bitIndex24))& (v5094(VarCurr,bitIndex16)<->v5049(VarCurr,bitIndex23))& (v5094(VarCurr,bitIndex15)<->v5049(VarCurr,bitIndex22))& (v5094(VarCurr,bitIndex14)<->v5049(VarCurr,bitIndex21))& (v5094(VarCurr,bitIndex13)<->v5049(VarCurr,bitIndex20))& (v5094(VarCurr,bitIndex12)<->v5049(VarCurr,bitIndex19))).
% 297.34/295.58  all VarCurr B (range_24_19(B)-> (v5049(VarCurr,B)<->v5074(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_24_19(B)-> (v5065(VarCurr,B)<->v5067(VarCurr,B))).
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex24)<->v5717(VarCurr)).
% 297.34/295.58  v5717(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex23)<->v5719(VarCurr)).
% 297.34/295.58  v5719(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex22)<->v5721(VarCurr)).
% 297.34/295.58  v5721(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex21)<->v5723(VarCurr)).
% 297.34/295.58  v5723(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex20)<->v5725(VarCurr)).
% 297.34/295.58  v5725(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex19)<->v5727(VarCurr)).
% 297.34/295.58  v5727(constB0)<->$F.
% 297.34/295.58  all VarCurr B (range_24_19(B)-> (v5057(VarCurr,B)<->v5059(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_24_19(B)-> (v5059(VarCurr,B)<->v5061(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_24_19(B)-> (v5061(VarCurr,B)<->v5063(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_24_19(B)-> (v5063(VarCurr,B)<->v543(VarCurr,B))).
% 297.34/295.58  all B (range_24_19(B)<->bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B).
% 297.34/295.58  all VarCurr B (range_17_12(B)-> (v5078(VarCurr,B)<->v5079(VarCurr,B))).
% 297.34/295.58  all B (range_17_12(B)<->bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B).
% 297.34/295.58  all VarNext (v5015(VarNext,bitIndex22)<->v5709(VarNext,bitIndex22)).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5711(VarNext)-> (all B (range_61_0(B)-> (v5709(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.58  all VarNext (v5711(VarNext)-> (all B (range_61_0(B)-> (v5709(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5711(VarNext)<->v5712(VarNext)&v5650(VarNext))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5712(VarNext)<->v5714(VarNext)&v5637(VarNext))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5714(VarNext)<->v5644(VarNext))).
% 297.34/295.58  all VarCurr (v5028(VarCurr,bitIndex22)<->v5030(VarCurr,bitIndex22)).
% 297.34/295.58  all VarCurr (v5030(VarCurr,bitIndex22)<->v5032(VarCurr,bitIndex22)).
% 297.34/295.58  all VarCurr (v5032(VarCurr,bitIndex22)<->v5034(VarCurr,bitIndex22)).
% 297.34/295.58  all VarCurr (v5034(VarCurr,bitIndex22)<->v5632(VarCurr,bitIndex22)).
% 297.34/295.58  all VarCurr (v5045(VarCurr,bitIndex11)<->v5619(VarCurr,bitIndex11)).
% 297.34/295.58  all VarCurr (v5076(VarCurr,bitIndex8)<->v5107(VarCurr,bitIndex8)).
% 297.34/295.58  all VarCurr (v5076(VarCurr,bitIndex11)<->v5107(VarCurr,bitIndex11)).
% 297.34/295.58  all VarCurr ((v5094(VarCurr,bitIndex11)<->v5049(VarCurr,bitIndex18))& (v5094(VarCurr,bitIndex10)<->v5049(VarCurr,bitIndex17))& (v5094(VarCurr,bitIndex9)<->v5049(VarCurr,bitIndex16))& (v5094(VarCurr,bitIndex8)<->v5049(VarCurr,bitIndex15))& (v5094(VarCurr,bitIndex7)<->v5049(VarCurr,bitIndex14))& (v5094(VarCurr,bitIndex6)<->v5049(VarCurr,bitIndex13))).
% 297.34/295.58  all VarCurr B (range_18_13(B)-> (v5049(VarCurr,B)<->v5074(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_18_13(B)-> (v5065(VarCurr,B)<->v5067(VarCurr,B))).
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex18)<->v5697(VarCurr)).
% 297.34/295.58  v5697(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex17)<->v5699(VarCurr)).
% 297.34/295.58  v5699(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex16)<->v5701(VarCurr)).
% 297.34/295.58  v5701(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex15)<->v5703(VarCurr)).
% 297.34/295.58  v5703(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex14)<->v5705(VarCurr)).
% 297.34/295.58  v5705(constB0)<->$F.
% 297.34/295.58  all VarCurr (v5067(VarCurr,bitIndex13)<->v5707(VarCurr)).
% 297.34/295.58  v5707(constB0)<->$F.
% 297.34/295.58  all VarCurr B (range_18_13(B)-> (v5057(VarCurr,B)<->v5059(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_18_13(B)-> (v5059(VarCurr,B)<->v5061(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_18_13(B)-> (v5061(VarCurr,B)<->v5063(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_18_13(B)-> (v5063(VarCurr,B)<->v543(VarCurr,B))).
% 297.34/295.58  all B (range_18_13(B)<->bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B).
% 297.34/295.58  all VarCurr B (range_11_6(B)-> (v5078(VarCurr,B)<->v5079(VarCurr,B))).
% 297.34/295.58  all B (range_11_6(B)<->bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5680(VarNext)-> (all B (range_2_0(B)-> (v5671(VarNext,B)<->v5671(VarCurr,B)))))).
% 297.34/295.58  all VarNext (v5680(VarNext)-> (all B (range_2_0(B)-> (v5671(VarNext,B)<->v5693(VarNext,B))))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_2_0(B)-> (v5693(VarNext,B)<->v5691(VarCurr,B))))).
% 297.34/295.58  all VarCurr (-v5651(VarCurr)-> (all B (range_2_0(B)-> (v5691(VarCurr,B)<->v5673(VarCurr,B))))).
% 297.34/295.58  all VarCurr (v5651(VarCurr)-> (all B (range_2_0(B)-> (v5691(VarCurr,B)<->$F)))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5680(VarNext)<->v5681(VarNext)&v5688(VarNext))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5688(VarNext)<->v5686(VarCurr))).
% 297.34/295.58  all VarCurr (v5686(VarCurr)<->v5651(VarCurr)|v5689(VarCurr)).
% 297.34/295.58  all VarCurr (v5689(VarCurr)<->v5653(VarCurr)&v5690(VarCurr)).
% 297.34/295.58  all VarCurr (-v5690(VarCurr)<->v5651(VarCurr)).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5681(VarNext)<->v5683(VarNext)&v5637(VarNext))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5683(VarNext)<->v5644(VarNext))).
% 297.34/295.58  all B (range_2_0(B)-> (v5671(constB0,B)<->$F)).
% 297.34/295.58  all VarCurr B (range_2_0(B)-> (v5673(VarCurr,B)<->v5675(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_2_0(B)-> (v5675(VarCurr,B)<->v5677(VarCurr,B))).
% 297.34/295.58  all VarCurr B (range_2_0(B)-> (v5677(VarCurr,B)<->v5047(VarCurr,B))).
% 297.34/295.58  all VarNext (v5015(VarNext,bitIndex13)<->v5661(VarNext,bitIndex13)).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5663(VarNext)-> (all B (range_61_0(B)-> (v5661(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.34/295.58  all VarNext (v5663(VarNext)-> (all B (range_61_0(B)-> (v5661(VarNext,B)<->v5657(VarNext,B))))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5663(VarNext)<->v5664(VarNext)&v5650(VarNext))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5664(VarNext)<->v5666(VarNext)&v5637(VarNext))).
% 297.34/295.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5666(VarNext)<->v5644(VarNext))).
% 297.39/295.59  all VarCurr (v5028(VarCurr,bitIndex13)<->v5030(VarCurr,bitIndex13)).
% 297.39/295.59  all VarCurr (v5030(VarCurr,bitIndex13)<->v5032(VarCurr,bitIndex13)).
% 297.39/295.59  all VarCurr (v5032(VarCurr,bitIndex13)<->v5034(VarCurr,bitIndex13)).
% 297.39/295.59  all VarCurr (v5034(VarCurr,bitIndex13)<->v5632(VarCurr,bitIndex13)).
% 297.39/295.59  all VarCurr (v5045(VarCurr,bitIndex2)<->v5619(VarCurr,bitIndex2)).
% 297.39/295.59  all VarNext (v5015(VarNext,bitIndex16)<->v5639(VarNext,bitIndex16)).
% 297.39/295.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5640(VarNext)-> (all B (range_61_0(B)-> (v5639(VarNext,B)<->v5015(VarCurr,B)))))).
% 297.39/295.59  all VarNext (v5640(VarNext)-> (all B (range_61_0(B)-> (v5639(VarNext,B)<->v5657(VarNext,B))))).
% 297.39/295.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_61_0(B)-> (v5657(VarNext,B)<->v5655(VarCurr,B))))).
% 297.39/295.59  all VarCurr (-v5651(VarCurr)-> (all B (range_61_0(B)-> (v5655(VarCurr,B)<->v5028(VarCurr,B))))).
% 297.39/295.59  all VarCurr (v5651(VarCurr)-> (all B (range_61_0(B)-> (v5655(VarCurr,B)<->$F)))).
% 297.39/295.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5640(VarNext)<->v5641(VarNext)&v5650(VarNext))).
% 297.39/295.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5650(VarNext)<->v5648(VarCurr))).
% 297.39/295.59  all VarCurr (v5648(VarCurr)<->v5651(VarCurr)|v5652(VarCurr)).
% 297.39/295.59  all VarCurr (v5652(VarCurr)<->v5653(VarCurr)&v5654(VarCurr)).
% 297.39/295.59  all VarCurr (-v5654(VarCurr)<->v5651(VarCurr)).
% 297.39/295.59  all VarCurr (-v5653(VarCurr)<->v5020(VarCurr)).
% 297.39/295.59  all VarCurr (-v5651(VarCurr)<->v5017(VarCurr)).
% 297.39/295.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5641(VarNext)<->v5642(VarNext)&v5637(VarNext))).
% 297.39/295.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v5642(VarNext)<->v5644(VarNext))).
% 297.39/295.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5644(VarNext)<->v5637(VarCurr))).
% 297.39/295.59  all B (range_61_0(B)-> (v5015(constB0,B)<->$F)).
% 297.39/295.59  all B (range_61_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B|bitIndex56=B|bitIndex57=B|bitIndex58=B|bitIndex59=B|bitIndex60=B|bitIndex61=B).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex61).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex60).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex59).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex58).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex57).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex56).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex55).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex54).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex53).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex52).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex51).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex50).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex49).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex48).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex47).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex46).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex45).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex44).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex43).
% 297.39/295.59  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex42).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex41).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex40).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex39).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex38).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex37).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex36).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex35).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex34).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex33).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex32).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex31).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex30).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex29).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex28).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex27).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex26).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex25).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex24).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex23).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex22).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex21).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex20).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex19).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex18).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex17).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex16).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex15).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex14).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex13).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex12).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex11).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex10).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex9).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex8).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex7).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex6).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex5).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex4).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex3).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex2).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex1).
% 297.39/295.60  -b00000000000000000000000000000000000000000000000000000000000000(bitIndex0).
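% The facts above (bitIndex42 down to bitIndex0, continuing a block started
% earlier in the listing) spell out the 62-bit all-zeros constant one index at
% a time: each literal -b00...0(bitIndexK) says bit K of the constant is
% false. A minimal Python sketch of that encoding (WIDTH and bitvec_zero are
% illustrative names, not from the problem):
%
%   WIDTH = 62
%
%   def bitvec_zero(k: int) -> bool:
%       """Bit k of b00...0 (62 zeros); false at every index."""
%       assert 0 <= k < WIDTH
%       return False
%
%   assert not any(bitvec_zero(k) for k in range(WIDTH))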
% 297.39/295.60  all VarCurr (v5637(VarCurr)<->v274(VarCurr)).
% 297.39/295.60  all VarCurr (v5028(VarCurr,bitIndex16)<->v5030(VarCurr,bitIndex16)).
% 297.39/295.60  all VarCurr (v5030(VarCurr,bitIndex16)<->v5032(VarCurr,bitIndex16)).
% 297.39/295.60  all VarCurr (v5032(VarCurr,bitIndex16)<->v5034(VarCurr,bitIndex16)).
% 297.39/295.60  all VarCurr (v5034(VarCurr,bitIndex16)<->v5632(VarCurr,bitIndex16)).
% 297.39/295.60  all VarCurr (-v5633(VarCurr)-> (all B (range_84_0(B)-> (v5632(VarCurr,B)<->v5040(VarCurr,B))))).
% 297.39/295.60  all VarCurr (v5633(VarCurr)-> (all B (range_84_0(B)-> (v5632(VarCurr,B)<->v5635(VarCurr,B))))).
% 297.39/295.60  all VarCurr B (range_10_0(B)-> (v5635(VarCurr,B)<->v5040(VarCurr,B))).
% 297.39/295.60  all B (range_10_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B).
% 297.39/295.60  all VarCurr ((v5635(VarCurr,bitIndex37)<->v5045(VarCurr,bitIndex26))& (v5635(VarCurr,bitIndex36)<->v5045(VarCurr,bitIndex25))& (v5635(VarCurr,bitIndex35)<->v5045(VarCurr,bitIndex24))& (v5635(VarCurr,bitIndex34)<->v5045(VarCurr,bitIndex23))& (v5635(VarCurr,bitIndex33)<->v5045(VarCurr,bitIndex22))& (v5635(VarCurr,bitIndex32)<->v5045(VarCurr,bitIndex21))& (v5635(VarCurr,bitIndex31)<->v5045(VarCurr,bitIndex20))& (v5635(VarCurr,bitIndex30)<->v5045(VarCurr,bitIndex19))& (v5635(VarCurr,bitIndex29)<->v5045(VarCurr,bitIndex18))& (v5635(VarCurr,bitIndex28)<->v5045(VarCurr,bitIndex17))& (v5635(VarCurr,bitIndex27)<->v5045(VarCurr,bitIndex16))& (v5635(VarCurr,bitIndex26)<->v5045(VarCurr,bitIndex15))& (v5635(VarCurr,bitIndex25)<->v5045(VarCurr,bitIndex14))& (v5635(VarCurr,bitIndex24)<->v5045(VarCurr,bitIndex13))& (v5635(VarCurr,bitIndex23)<->v5045(VarCurr,bitIndex12))& (v5635(VarCurr,bitIndex22)<->v5045(VarCurr,bitIndex11))& (v5635(VarCurr,bitIndex21)<->v5045(VarCurr,bitIndex10))& (v5635(VarCurr,bitIndex20)<->v5045(VarCurr,bitIndex9))& (v5635(VarCurr,bitIndex19)<->v5045(VarCurr,bitIndex8))& (v5635(VarCurr,bitIndex18)<->v5045(VarCurr,bitIndex7))& (v5635(VarCurr,bitIndex17)<->v5045(VarCurr,bitIndex6))& (v5635(VarCurr,bitIndex16)<->v5045(VarCurr,bitIndex5))& (v5635(VarCurr,bitIndex15)<->v5045(VarCurr,bitIndex4))& (v5635(VarCurr,bitIndex14)<->v5045(VarCurr,bitIndex3))& (v5635(VarCurr,bitIndex13)<->v5045(VarCurr,bitIndex2))& (v5635(VarCurr,bitIndex12)<->v5045(VarCurr,bitIndex1))& (v5635(VarCurr,bitIndex11)<->v5045(VarCurr,bitIndex0))).
% 297.39/295.61  all VarCurr B (range_84_38(B)-> (v5635(VarCurr,B)<->v5040(VarCurr,B))).
% 297.39/295.61  all B (range_84_38(B)<->bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B|bitIndex56=B|bitIndex57=B|bitIndex58=B|bitIndex59=B|bitIndex60=B|bitIndex61=B|bitIndex62=B|bitIndex63=B|bitIndex64=B|bitIndex65=B|bitIndex66=B|bitIndex67=B|bitIndex68=B|bitIndex69=B|bitIndex70=B|bitIndex71=B|bitIndex72=B|bitIndex73=B|bitIndex74=B|bitIndex75=B|bitIndex76=B|bitIndex77=B|bitIndex78=B|bitIndex79=B|bitIndex80=B|bitIndex81=B|bitIndex82=B|bitIndex83=B|bitIndex84=B).
% 297.39/295.61  all VarCurr (v5633(VarCurr)<->v5036(VarCurr)&v5634(VarCurr)).
% 297.39/295.61  all VarCurr (-v5634(VarCurr)<->v5038(VarCurr)).
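% Taken together, the axioms since v5637 form a guarded multiplexer: when
% v5633 (v5036 AND NOT v5038) is false, v5632 copies v5040 across range_84_0;
% when it is true, v5632 copies v5635, which is v5040 with bits 37..11
% overlaid by v5045 bits 26..0. A hedged Python model of the mux (the
% list-of-bool signal representation is an assumption):
%
%   def v5633(v5036: bool, v5038: bool) -> bool:
%       # v5633 <-> v5036 & -v5038, per the two axioms above
%       return v5036 and not v5038
%
%   def v5632(sel: bool, v5040: list, v5045: list) -> list:
%       """85-bit result; bits 37..11 come from v5045[26..0] when sel holds."""
%       out = list(v5040)              # else-branch: straight copy of v5040
%       if sel:
%           for i in range(27):        # bit 11+i <- v5045 bit i
%               out[11 + i] = v5045[i]
%       return out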
% 297.39/295.61  all VarCurr (v5045(VarCurr,bitIndex5)<->v5619(VarCurr,bitIndex5)).
% 297.39/295.61  all VarCurr (-v5620(VarCurr)& -v5621(VarCurr)& -v5623(VarCurr)& -v5624(VarCurr)& -v5626(VarCurr)& -v5627(VarCurr)& -v5629(VarCurr)-> (all B (range_26_0(B)-> (v5619(VarCurr,B)<->$F)))).
% 297.39/295.61  all VarCurr (v5629(VarCurr)-> (all B (range_26_0(B)-> (v5619(VarCurr,B)<->$F)))).
% 297.39/295.61  all VarCurr (v5627(VarCurr)-> (all B (range_26_0(B)-> (v5619(VarCurr,B)<->v5628(VarCurr,B))))).
% 297.39/295.61  all VarCurr (v5626(VarCurr)-> (all B (range_26_0(B)-> (v5619(VarCurr,B)<->$F)))).
% 297.39/295.61  all VarCurr (v5624(VarCurr)-> (all B (range_26_0(B)-> (v5619(VarCurr,B)<->v5625(VarCurr,B))))).
% 297.39/295.61  all VarCurr (v5623(VarCurr)-> (all B (range_26_0(B)-> (v5619(VarCurr,B)<->$F)))).
% 297.39/295.61  all VarCurr (v5621(VarCurr)-> (all B (range_26_0(B)-> (v5619(VarCurr,B)<->v5622(VarCurr,B))))).
% 297.39/295.61  all VarCurr (v5620(VarCurr)-> (all B (range_26_0(B)-> (v5619(VarCurr,B)<->v5076(VarCurr,B))))).
% 297.39/295.61  all VarCurr (v5629(VarCurr)<->v5630(VarCurr)|v5631(VarCurr)).
% 297.39/295.61  all VarCurr (v5631(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$T)& (v5047(VarCurr,bitIndex1)<->$T)& (v5047(VarCurr,bitIndex0)<->$T)).
% 297.39/295.61  all VarCurr (v5630(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$T)& (v5047(VarCurr,bitIndex1)<->$T)& (v5047(VarCurr,bitIndex0)<->$F)).
% 297.39/295.61  all VarCurr ((v5628(VarCurr,bitIndex14)<->v5040(VarCurr,bitIndex25))& (v5628(VarCurr,bitIndex13)<->v5040(VarCurr,bitIndex24))& (v5628(VarCurr,bitIndex12)<->v5040(VarCurr,bitIndex23))& (v5628(VarCurr,bitIndex11)<->v5040(VarCurr,bitIndex22))& (v5628(VarCurr,bitIndex10)<->v5040(VarCurr,bitIndex21))& (v5628(VarCurr,bitIndex9)<->v5040(VarCurr,bitIndex20))& (v5628(VarCurr,bitIndex8)<->v5040(VarCurr,bitIndex19))& (v5628(VarCurr,bitIndex7)<->v5040(VarCurr,bitIndex18))& (v5628(VarCurr,bitIndex6)<->v5040(VarCurr,bitIndex17))& (v5628(VarCurr,bitIndex5)<->v5040(VarCurr,bitIndex16))& (v5628(VarCurr,bitIndex4)<->v5040(VarCurr,bitIndex15))& (v5628(VarCurr,bitIndex3)<->v5040(VarCurr,bitIndex14))& (v5628(VarCurr,bitIndex2)<->v5040(VarCurr,bitIndex13))& (v5628(VarCurr,bitIndex1)<->v5040(VarCurr,bitIndex12))& (v5628(VarCurr,bitIndex0)<->v5040(VarCurr,bitIndex11))).
% 297.39/295.61  all VarCurr ((v5628(VarCurr,bitIndex26)<->v5076(VarCurr,bitIndex11))& (v5628(VarCurr,bitIndex25)<->v5076(VarCurr,bitIndex10))& (v5628(VarCurr,bitIndex24)<->v5076(VarCurr,bitIndex9))& (v5628(VarCurr,bitIndex23)<->v5076(VarCurr,bitIndex8))& (v5628(VarCurr,bitIndex22)<->v5076(VarCurr,bitIndex7))& (v5628(VarCurr,bitIndex21)<->v5076(VarCurr,bitIndex6))& (v5628(VarCurr,bitIndex20)<->v5076(VarCurr,bitIndex5))& (v5628(VarCurr,bitIndex19)<->v5076(VarCurr,bitIndex4))& (v5628(VarCurr,bitIndex18)<->v5076(VarCurr,bitIndex3))& (v5628(VarCurr,bitIndex17)<->v5076(VarCurr,bitIndex2))& (v5628(VarCurr,bitIndex16)<->v5076(VarCurr,bitIndex1))& (v5628(VarCurr,bitIndex15)<->v5076(VarCurr,bitIndex0))).
% 297.39/295.61  all VarCurr (v5627(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$T)& (v5047(VarCurr,bitIndex1)<->$F)& (v5047(VarCurr,bitIndex0)<->$T)).
% 297.39/295.61  all VarCurr (v5626(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$T)& (v5047(VarCurr,bitIndex1)<->$F)& (v5047(VarCurr,bitIndex0)<->$F)).
% 297.39/295.61  all VarCurr ((v5625(VarCurr,bitIndex8)<->v5040(VarCurr,bitIndex19))& (v5625(VarCurr,bitIndex7)<->v5040(VarCurr,bitIndex18))& (v5625(VarCurr,bitIndex6)<->v5040(VarCurr,bitIndex17))& (v5625(VarCurr,bitIndex5)<->v5040(VarCurr,bitIndex16))& (v5625(VarCurr,bitIndex4)<->v5040(VarCurr,bitIndex15))& (v5625(VarCurr,bitIndex3)<->v5040(VarCurr,bitIndex14))& (v5625(VarCurr,bitIndex2)<->v5040(VarCurr,bitIndex13))& (v5625(VarCurr,bitIndex1)<->v5040(VarCurr,bitIndex12))& (v5625(VarCurr,bitIndex0)<->v5040(VarCurr,bitIndex11))).
% 297.39/295.61  all VarCurr ((v5625(VarCurr,bitIndex26)<->v5076(VarCurr,bitIndex17))& (v5625(VarCurr,bitIndex25)<->v5076(VarCurr,bitIndex16))& (v5625(VarCurr,bitIndex24)<->v5076(VarCurr,bitIndex15))& (v5625(VarCurr,bitIndex23)<->v5076(VarCurr,bitIndex14))& (v5625(VarCurr,bitIndex22)<->v5076(VarCurr,bitIndex13))& (v5625(VarCurr,bitIndex21)<->v5076(VarCurr,bitIndex12))& (v5625(VarCurr,bitIndex20)<->v5076(VarCurr,bitIndex11))& (v5625(VarCurr,bitIndex19)<->v5076(VarCurr,bitIndex10))& (v5625(VarCurr,bitIndex18)<->v5076(VarCurr,bitIndex9))& (v5625(VarCurr,bitIndex17)<->v5076(VarCurr,bitIndex8))& (v5625(VarCurr,bitIndex16)<->v5076(VarCurr,bitIndex7))& (v5625(VarCurr,bitIndex15)<->v5076(VarCurr,bitIndex6))& (v5625(VarCurr,bitIndex14)<->v5076(VarCurr,bitIndex5))& (v5625(VarCurr,bitIndex13)<->v5076(VarCurr,bitIndex4))& (v5625(VarCurr,bitIndex12)<->v5076(VarCurr,bitIndex3))& (v5625(VarCurr,bitIndex11)<->v5076(VarCurr,bitIndex2))& (v5625(VarCurr,bitIndex10)<->v5076(VarCurr,bitIndex1))& (v5625(VarCurr,bitIndex9)<->v5076(VarCurr,bitIndex0))).
% 297.39/295.61  all VarCurr (v5624(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$F)& (v5047(VarCurr,bitIndex1)<->$T)& (v5047(VarCurr,bitIndex0)<->$T)).
% 297.39/295.61  all VarCurr (v5623(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$F)& (v5047(VarCurr,bitIndex1)<->$T)& (v5047(VarCurr,bitIndex0)<->$F)).
% 297.39/295.61  all VarCurr ((v5622(VarCurr,bitIndex2)<->v5040(VarCurr,bitIndex13))& (v5622(VarCurr,bitIndex1)<->v5040(VarCurr,bitIndex12))& (v5622(VarCurr,bitIndex0)<->v5040(VarCurr,bitIndex11))).
% 297.39/295.61  all VarCurr ((v5622(VarCurr,bitIndex26)<->v5076(VarCurr,bitIndex23))& (v5622(VarCurr,bitIndex25)<->v5076(VarCurr,bitIndex22))& (v5622(VarCurr,bitIndex24)<->v5076(VarCurr,bitIndex21))& (v5622(VarCurr,bitIndex23)<->v5076(VarCurr,bitIndex20))& (v5622(VarCurr,bitIndex22)<->v5076(VarCurr,bitIndex19))& (v5622(VarCurr,bitIndex21)<->v5076(VarCurr,bitIndex18))& (v5622(VarCurr,bitIndex20)<->v5076(VarCurr,bitIndex17))& (v5622(VarCurr,bitIndex19)<->v5076(VarCurr,bitIndex16))& (v5622(VarCurr,bitIndex18)<->v5076(VarCurr,bitIndex15))& (v5622(VarCurr,bitIndex17)<->v5076(VarCurr,bitIndex14))& (v5622(VarCurr,bitIndex16)<->v5076(VarCurr,bitIndex13))& (v5622(VarCurr,bitIndex15)<->v5076(VarCurr,bitIndex12))& (v5622(VarCurr,bitIndex14)<->v5076(VarCurr,bitIndex11))& (v5622(VarCurr,bitIndex13)<->v5076(VarCurr,bitIndex10))& (v5622(VarCurr,bitIndex12)<->v5076(VarCurr,bitIndex9))& (v5622(VarCurr,bitIndex11)<->v5076(VarCurr,bitIndex8))& (v5622(VarCurr,bitIndex10)<->v5076(VarCurr,bitIndex7))& (v5622(VarCurr,bitIndex9)<->v5076(VarCurr,bitIndex6))& (v5622(VarCurr,bitIndex8)<->v5076(VarCurr,bitIndex5))& (v5622(VarCurr,bitIndex7)<->v5076(VarCurr,bitIndex4))& (v5622(VarCurr,bitIndex6)<->v5076(VarCurr,bitIndex3))& (v5622(VarCurr,bitIndex5)<->v5076(VarCurr,bitIndex2))& (v5622(VarCurr,bitIndex4)<->v5076(VarCurr,bitIndex1))& (v5622(VarCurr,bitIndex3)<->v5076(VarCurr,bitIndex0))).
% 297.39/295.62  all VarCurr (v5621(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$F)& (v5047(VarCurr,bitIndex1)<->$F)& (v5047(VarCurr,bitIndex0)<->$T)).
% 297.39/295.62  all VarCurr (v5620(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$F)& (v5047(VarCurr,bitIndex1)<->$F)& (v5047(VarCurr,bitIndex0)<->$F)).
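% The eight cases above decode the 3-bit selector v5047 into a shift-style
% mux for v5619: selector 000 passes v5076 through, 001/011/101 pass v5076
% shifted up by 3, 9, and 15 bits with the vacated low bits refilled from
% v5040[11..], and the remaining selector values force all 27 bits false. A
% Python sketch of the decode (list-of-bool signals are an assumption):
%
%   def v5619(v5047: list, v5076: list, v5040: list) -> list:
%       """27-bit case select on the 3-bit selector v5047 (index 0 = LSB)."""
%       sel = (v5047[2], v5047[1], v5047[0])
%       shift = {(False, False, False): 0,       # v5620: pass-through
%                (False, False, True): 3,        # v5621 -> v5622
%                (False, True, True): 9,         # v5624 -> v5625
%                (True, False, True): 15}.get(sel)   # v5627 -> v5628
%       if shift is None:                        # 010, 100, 110, 111: all-false
%           return [False] * 27
%       low = [v5040[11 + i] for i in range(shift)]  # refill from v5040[11..]
%       return low + v5076[:27 - shift]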
% 297.39/295.62  all VarCurr (v5076(VarCurr,bitIndex2)<->v5107(VarCurr,bitIndex2)).
% 297.39/295.62  all VarCurr (v5076(VarCurr,bitIndex5)<->v5107(VarCurr,bitIndex5)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex0)<->v5615(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex1)<->v5610(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex2)<->v5605(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex3)<->v5600(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex4)<->v5595(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex5)<->v5590(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex6)<->v5585(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex7)<->v5580(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex8)<->v5575(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex9)<->v5570(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex10)<->v5565(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex11)<->v5560(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex12)<->v5555(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex13)<->v5550(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex14)<->v5545(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex15)<->v5540(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex16)<->v5535(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex17)<->v5530(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex18)<->v5525(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex19)<->v5520(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex20)<->v5515(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex21)<->v5510(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex22)<->v5505(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex23)<->v5500(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex24)<->v5495(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex25)<->v5490(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex26)<->v5485(VarCurr)).
% 297.39/295.62  all VarCurr (v5107(VarCurr,bitIndex27)<->v5109(VarCurr)).
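% The 28 renamings above bit-blast the vector v5107: bit 0 is the scalar
% v5615, bit 1 is v5610, and so on up to bit 27 = v5109, stepping through the
% v56xx/v5109 family. A trivial Python view of the packing (purely
% illustrative):
%
%   def pack_v5107(scalars: list) -> list:
%       """scalars = [v5615, v5610, v5605, ..., v5485, v5109], LSB first."""
%       assert len(scalars) == 28
%       return list(scalars)          # v5107(bitIndexK) <-> scalars[K]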
% 297.39/295.62  all VarCurr (v5615(VarCurr)<->v5616(VarCurr)&v5618(VarCurr)).
% 297.39/295.62  all VarCurr (v5618(VarCurr)<->v5094(VarCurr,bitIndex0)|v5165(VarCurr,bitIndex0)).
% 297.39/295.62  all VarCurr (v5616(VarCurr)<->v5172(VarCurr)|v5617(VarCurr)).
% 297.39/295.62  all VarCurr (-v5617(VarCurr)<->v5165(VarCurr,bitIndex0)).
% 297.39/295.62  all VarCurr (v5610(VarCurr)<->v5611(VarCurr)&v5614(VarCurr)).
% 297.39/295.62  all VarCurr (v5614(VarCurr)<->v5164(VarCurr)|v5166(VarCurr)).
% 297.39/295.62  all VarCurr (v5611(VarCurr)<->v5612(VarCurr)|v5613(VarCurr)).
% 297.39/295.62  all VarCurr (-v5613(VarCurr)<->v5166(VarCurr)).
% 297.39/295.62  all VarCurr (-v5612(VarCurr)<->v5164(VarCurr)).
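% The five axioms for v5610 are the first full instance of a motif repeated
% all the way down to v5485: out = (a OR b) AND (NOT a OR NOT b), i.e. an
% exclusive-or built from fresh definition names (here a = v5164, b = v5166,
% with v5614 the OR and v5611 the NAND via v5612/v5613). A one-function
% Python check of the motif:
%
%   def xor_motif(a: bool, b: bool) -> bool:
%       """(a | b) & (~a | ~b), the shape of each 5-axiom group."""
%       return (a or b) and ((not a) or (not b))
%
%   assert all(xor_motif(a, b) == (a ^ b)
%              for a in (False, True) for b in (False, True))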
% 297.39/295.62  all VarCurr (v5605(VarCurr)<->v5606(VarCurr)&v5609(VarCurr)).
% 297.39/295.62  all VarCurr (v5609(VarCurr)<->v5162(VarCurr)|v5177(VarCurr)).
% 297.39/295.62  all VarCurr (v5606(VarCurr)<->v5607(VarCurr)|v5608(VarCurr)).
% 297.39/295.62  all VarCurr (-v5608(VarCurr)<->v5177(VarCurr)).
% 297.39/295.62  all VarCurr (-v5607(VarCurr)<->v5162(VarCurr)).
% 297.39/295.62  all VarCurr (v5600(VarCurr)<->v5601(VarCurr)&v5604(VarCurr)).
% 297.39/295.62  all VarCurr (v5604(VarCurr)<->v5160(VarCurr)|v5189(VarCurr)).
% 297.39/295.62  all VarCurr (v5601(VarCurr)<->v5602(VarCurr)|v5603(VarCurr)).
% 297.39/295.62  all VarCurr (-v5603(VarCurr)<->v5189(VarCurr)).
% 297.39/295.62  all VarCurr (-v5602(VarCurr)<->v5160(VarCurr)).
% 297.39/295.62  all VarCurr (v5595(VarCurr)<->v5596(VarCurr)&v5599(VarCurr)).
% 297.39/295.62  all VarCurr (v5599(VarCurr)<->v5158(VarCurr)|v5201(VarCurr)).
% 297.39/295.62  all VarCurr (v5596(VarCurr)<->v5597(VarCurr)|v5598(VarCurr)).
% 297.39/295.62  all VarCurr (-v5598(VarCurr)<->v5201(VarCurr)).
% 297.39/295.62  all VarCurr (-v5597(VarCurr)<->v5158(VarCurr)).
% 297.39/295.62  all VarCurr (v5590(VarCurr)<->v5591(VarCurr)&v5594(VarCurr)).
% 297.39/295.62  all VarCurr (v5594(VarCurr)<->v5156(VarCurr)|v5213(VarCurr)).
% 297.39/295.62  all VarCurr (v5591(VarCurr)<->v5592(VarCurr)|v5593(VarCurr)).
% 297.39/295.63  all VarCurr (-v5593(VarCurr)<->v5213(VarCurr)).
% 297.39/295.63  all VarCurr (-v5592(VarCurr)<->v5156(VarCurr)).
% 297.39/295.63  all VarCurr (v5585(VarCurr)<->v5586(VarCurr)&v5589(VarCurr)).
% 297.39/295.63  all VarCurr (v5589(VarCurr)<->v5154(VarCurr)|v5225(VarCurr)).
% 297.39/295.63  all VarCurr (v5586(VarCurr)<->v5587(VarCurr)|v5588(VarCurr)).
% 297.39/295.63  all VarCurr (-v5588(VarCurr)<->v5225(VarCurr)).
% 297.39/295.63  all VarCurr (-v5587(VarCurr)<->v5154(VarCurr)).
% 297.39/295.63  all VarCurr (v5580(VarCurr)<->v5581(VarCurr)&v5584(VarCurr)).
% 297.39/295.63  all VarCurr (v5584(VarCurr)<->v5152(VarCurr)|v5237(VarCurr)).
% 297.39/295.63  all VarCurr (v5581(VarCurr)<->v5582(VarCurr)|v5583(VarCurr)).
% 297.39/295.63  all VarCurr (-v5583(VarCurr)<->v5237(VarCurr)).
% 297.39/295.63  all VarCurr (-v5582(VarCurr)<->v5152(VarCurr)).
% 297.39/295.63  all VarCurr (v5575(VarCurr)<->v5576(VarCurr)&v5579(VarCurr)).
% 297.39/295.63  all VarCurr (v5579(VarCurr)<->v5150(VarCurr)|v5249(VarCurr)).
% 297.39/295.63  all VarCurr (v5576(VarCurr)<->v5577(VarCurr)|v5578(VarCurr)).
% 297.39/295.63  all VarCurr (-v5578(VarCurr)<->v5249(VarCurr)).
% 297.39/295.63  all VarCurr (-v5577(VarCurr)<->v5150(VarCurr)).
% 297.39/295.63  all VarCurr (v5570(VarCurr)<->v5571(VarCurr)&v5574(VarCurr)).
% 297.39/295.63  all VarCurr (v5574(VarCurr)<->v5148(VarCurr)|v5261(VarCurr)).
% 297.39/295.63  all VarCurr (v5571(VarCurr)<->v5572(VarCurr)|v5573(VarCurr)).
% 297.39/295.63  all VarCurr (-v5573(VarCurr)<->v5261(VarCurr)).
% 297.39/295.63  all VarCurr (-v5572(VarCurr)<->v5148(VarCurr)).
% 297.39/295.63  all VarCurr (v5565(VarCurr)<->v5566(VarCurr)&v5569(VarCurr)).
% 297.39/295.63  all VarCurr (v5569(VarCurr)<->v5146(VarCurr)|v5273(VarCurr)).
% 297.39/295.63  all VarCurr (v5566(VarCurr)<->v5567(VarCurr)|v5568(VarCurr)).
% 297.39/295.63  all VarCurr (-v5568(VarCurr)<->v5273(VarCurr)).
% 297.39/295.63  all VarCurr (-v5567(VarCurr)<->v5146(VarCurr)).
% 297.39/295.63  all VarCurr (v5560(VarCurr)<->v5561(VarCurr)&v5564(VarCurr)).
% 297.39/295.63  all VarCurr (v5564(VarCurr)<->v5144(VarCurr)|v5285(VarCurr)).
% 297.39/295.63  all VarCurr (v5561(VarCurr)<->v5562(VarCurr)|v5563(VarCurr)).
% 297.39/295.63  all VarCurr (-v5563(VarCurr)<->v5285(VarCurr)).
% 297.39/295.63  all VarCurr (-v5562(VarCurr)<->v5144(VarCurr)).
% 297.39/295.63  all VarCurr (v5555(VarCurr)<->v5556(VarCurr)&v5559(VarCurr)).
% 297.39/295.63  all VarCurr (v5559(VarCurr)<->v5142(VarCurr)|v5297(VarCurr)).
% 297.39/295.63  all VarCurr (v5556(VarCurr)<->v5557(VarCurr)|v5558(VarCurr)).
% 297.39/295.63  all VarCurr (-v5558(VarCurr)<->v5297(VarCurr)).
% 297.39/295.63  all VarCurr (-v5557(VarCurr)<->v5142(VarCurr)).
% 297.39/295.63  all VarCurr (v5550(VarCurr)<->v5551(VarCurr)&v5554(VarCurr)).
% 297.39/295.63  all VarCurr (v5554(VarCurr)<->v5140(VarCurr)|v5309(VarCurr)).
% 297.39/295.63  all VarCurr (v5551(VarCurr)<->v5552(VarCurr)|v5553(VarCurr)).
% 297.39/295.63  all VarCurr (-v5553(VarCurr)<->v5309(VarCurr)).
% 297.39/295.63  all VarCurr (-v5552(VarCurr)<->v5140(VarCurr)).
% 297.39/295.63  all VarCurr (v5545(VarCurr)<->v5546(VarCurr)&v5549(VarCurr)).
% 297.39/295.63  all VarCurr (v5549(VarCurr)<->v5138(VarCurr)|v5321(VarCurr)).
% 297.39/295.63  all VarCurr (v5546(VarCurr)<->v5547(VarCurr)|v5548(VarCurr)).
% 297.39/295.63  all VarCurr (-v5548(VarCurr)<->v5321(VarCurr)).
% 297.39/295.63  all VarCurr (-v5547(VarCurr)<->v5138(VarCurr)).
% 297.39/295.63  all VarCurr (v5540(VarCurr)<->v5541(VarCurr)&v5544(VarCurr)).
% 297.39/295.63  all VarCurr (v5544(VarCurr)<->v5136(VarCurr)|v5333(VarCurr)).
% 297.39/295.63  all VarCurr (v5541(VarCurr)<->v5542(VarCurr)|v5543(VarCurr)).
% 297.39/295.63  all VarCurr (-v5543(VarCurr)<->v5333(VarCurr)).
% 297.39/295.63  all VarCurr (-v5542(VarCurr)<->v5136(VarCurr)).
% 297.39/295.63  all VarCurr (v5535(VarCurr)<->v5536(VarCurr)&v5539(VarCurr)).
% 297.39/295.63  all VarCurr (v5539(VarCurr)<->v5134(VarCurr)|v5345(VarCurr)).
% 297.39/295.63  all VarCurr (v5536(VarCurr)<->v5537(VarCurr)|v5538(VarCurr)).
% 297.39/295.63  all VarCurr (-v5538(VarCurr)<->v5345(VarCurr)).
% 297.39/295.63  all VarCurr (-v5537(VarCurr)<->v5134(VarCurr)).
% 297.39/295.63  all VarCurr (v5530(VarCurr)<->v5531(VarCurr)&v5534(VarCurr)).
% 297.39/295.63  all VarCurr (v5534(VarCurr)<->v5132(VarCurr)|v5357(VarCurr)).
% 297.39/295.63  all VarCurr (v5531(VarCurr)<->v5532(VarCurr)|v5533(VarCurr)).
% 297.39/295.63  all VarCurr (-v5533(VarCurr)<->v5357(VarCurr)).
% 297.39/295.63  all VarCurr (-v5532(VarCurr)<->v5132(VarCurr)).
% 297.39/295.63  all VarCurr (v5525(VarCurr)<->v5526(VarCurr)&v5529(VarCurr)).
% 297.39/295.63  all VarCurr (v5529(VarCurr)<->v5130(VarCurr)|v5369(VarCurr)).
% 297.39/295.63  all VarCurr (v5526(VarCurr)<->v5527(VarCurr)|v5528(VarCurr)).
% 297.39/295.63  all VarCurr (-v5528(VarCurr)<->v5369(VarCurr)).
% 297.39/295.63  all VarCurr (-v5527(VarCurr)<->v5130(VarCurr)).
% 297.39/295.63  all VarCurr (v5520(VarCurr)<->v5521(VarCurr)&v5524(VarCurr)).
% 297.39/295.63  all VarCurr (v5524(VarCurr)<->v5128(VarCurr)|v5381(VarCurr)).
% 297.39/295.63  all VarCurr (v5521(VarCurr)<->v5522(VarCurr)|v5523(VarCurr)).
% 297.39/295.63  all VarCurr (-v5523(VarCurr)<->v5381(VarCurr)).
% 297.39/295.63  all VarCurr (-v5522(VarCurr)<->v5128(VarCurr)).
% 297.39/295.63  all VarCurr (v5515(VarCurr)<->v5516(VarCurr)&v5519(VarCurr)).
% 297.44/295.64  all VarCurr (v5519(VarCurr)<->v5126(VarCurr)|v5393(VarCurr)).
% 297.44/295.64  all VarCurr (v5516(VarCurr)<->v5517(VarCurr)|v5518(VarCurr)).
% 297.44/295.64  all VarCurr (-v5518(VarCurr)<->v5393(VarCurr)).
% 297.44/295.64  all VarCurr (-v5517(VarCurr)<->v5126(VarCurr)).
% 297.44/295.64  all VarCurr (v5510(VarCurr)<->v5511(VarCurr)&v5514(VarCurr)).
% 297.44/295.64  all VarCurr (v5514(VarCurr)<->v5124(VarCurr)|v5405(VarCurr)).
% 297.44/295.64  all VarCurr (v5511(VarCurr)<->v5512(VarCurr)|v5513(VarCurr)).
% 297.44/295.64  all VarCurr (-v5513(VarCurr)<->v5405(VarCurr)).
% 297.44/295.64  all VarCurr (-v5512(VarCurr)<->v5124(VarCurr)).
% 297.44/295.64  all VarCurr (v5505(VarCurr)<->v5506(VarCurr)&v5509(VarCurr)).
% 297.44/295.64  all VarCurr (v5509(VarCurr)<->v5122(VarCurr)|v5417(VarCurr)).
% 297.44/295.64  all VarCurr (v5506(VarCurr)<->v5507(VarCurr)|v5508(VarCurr)).
% 297.44/295.64  all VarCurr (-v5508(VarCurr)<->v5417(VarCurr)).
% 297.44/295.64  all VarCurr (-v5507(VarCurr)<->v5122(VarCurr)).
% 297.44/295.64  all VarCurr (v5500(VarCurr)<->v5501(VarCurr)&v5504(VarCurr)).
% 297.44/295.64  all VarCurr (v5504(VarCurr)<->v5120(VarCurr)|v5429(VarCurr)).
% 297.44/295.64  all VarCurr (v5501(VarCurr)<->v5502(VarCurr)|v5503(VarCurr)).
% 297.44/295.64  all VarCurr (-v5503(VarCurr)<->v5429(VarCurr)).
% 297.44/295.64  all VarCurr (-v5502(VarCurr)<->v5120(VarCurr)).
% 297.44/295.64  all VarCurr (v5495(VarCurr)<->v5496(VarCurr)&v5499(VarCurr)).
% 297.44/295.64  all VarCurr (v5499(VarCurr)<->v5118(VarCurr)|v5441(VarCurr)).
% 297.44/295.64  all VarCurr (v5496(VarCurr)<->v5497(VarCurr)|v5498(VarCurr)).
% 297.44/295.64  all VarCurr (-v5498(VarCurr)<->v5441(VarCurr)).
% 297.44/295.64  all VarCurr (-v5497(VarCurr)<->v5118(VarCurr)).
% 297.44/295.64  all VarCurr (v5490(VarCurr)<->v5491(VarCurr)&v5494(VarCurr)).
% 297.44/295.64  all VarCurr (v5494(VarCurr)<->v5116(VarCurr)|v5453(VarCurr)).
% 297.44/295.64  all VarCurr (v5491(VarCurr)<->v5492(VarCurr)|v5493(VarCurr)).
% 297.44/295.64  all VarCurr (-v5493(VarCurr)<->v5453(VarCurr)).
% 297.44/295.64  all VarCurr (-v5492(VarCurr)<->v5116(VarCurr)).
% 297.44/295.64  all VarCurr (v5485(VarCurr)<->v5486(VarCurr)&v5489(VarCurr)).
% 297.44/295.64  all VarCurr (v5489(VarCurr)<->v5114(VarCurr)|v5465(VarCurr)).
% 297.44/295.64  all VarCurr (v5486(VarCurr)<->v5487(VarCurr)|v5488(VarCurr)).
% 297.44/295.64  all VarCurr (-v5488(VarCurr)<->v5465(VarCurr)).
% 297.44/295.64  all VarCurr (-v5487(VarCurr)<->v5114(VarCurr)).
% 297.44/295.64  all VarCurr (v5109(VarCurr)<->v5110(VarCurr)&v5484(VarCurr)).
% 297.44/295.64  all VarCurr (v5484(VarCurr)<->v5112(VarCurr)|v5478(VarCurr)).
% 297.44/295.64  all VarCurr (v5110(VarCurr)<->v5111(VarCurr)|v5477(VarCurr)).
% 297.44/295.64  all VarCurr (-v5477(VarCurr)<->v5478(VarCurr)).
% 297.44/295.64  all VarCurr (v5478(VarCurr)<->v5479(VarCurr)&v5482(VarCurr)).
% 297.44/295.64  all VarCurr (v5482(VarCurr)<->v5483(VarCurr)|v5165(VarCurr,bitIndex27)).
% 297.44/295.64  all VarCurr (-v5483(VarCurr)<->v5480(VarCurr)).
% 297.44/295.64  all VarCurr (v5479(VarCurr)<->v5480(VarCurr)|v5481(VarCurr)).
% 297.44/295.64  all VarCurr (-v5481(VarCurr)<->v5165(VarCurr,bitIndex27)).
% 297.44/295.64  all VarCurr (v5480(VarCurr)<->v5471(VarCurr)&v5473(VarCurr)).
% 297.44/295.64  all VarCurr (-v5111(VarCurr)<->v5112(VarCurr)).
% 297.44/295.64  all VarCurr (v5112(VarCurr)<->v5113(VarCurr)|v5476(VarCurr)).
% 297.44/295.64  all VarCurr (v5476(VarCurr)<->v5468(VarCurr)&v5165(VarCurr,bitIndex26)).
% 297.44/295.64  all VarCurr (v5113(VarCurr)<->v5114(VarCurr)&v5465(VarCurr)).
% 297.44/295.64  all VarCurr (v5465(VarCurr)<->v5466(VarCurr)&v5475(VarCurr)).
% 297.44/295.64  all VarCurr (v5475(VarCurr)<->v5468(VarCurr)|v5165(VarCurr,bitIndex26)).
% 297.44/295.64  all VarCurr (v5466(VarCurr)<->v5467(VarCurr)|v5474(VarCurr)).
% 297.44/295.64  all VarCurr (-v5474(VarCurr)<->v5165(VarCurr,bitIndex26)).
% 297.44/295.64  all VarCurr (-v5467(VarCurr)<->v5468(VarCurr)).
% 297.44/295.64  all VarCurr (v5468(VarCurr)<->v5469(VarCurr)&v5472(VarCurr)).
% 297.44/295.64  all VarCurr (v5472(VarCurr)<->v5471(VarCurr)|v5473(VarCurr)).
% 297.44/295.64  all VarCurr (-v5473(VarCurr)<->v5094(VarCurr,bitIndex26)).
% 297.44/295.64  all VarCurr (v5469(VarCurr)<->v5470(VarCurr)|v5094(VarCurr,bitIndex26)).
% 297.44/295.64  all VarCurr (-v5470(VarCurr)<->v5471(VarCurr)).
% 297.44/295.64  all VarCurr (v5471(VarCurr)<->v5459(VarCurr)&v5461(VarCurr)).
% 297.44/295.64  all VarCurr (v5114(VarCurr)<->v5115(VarCurr)|v5464(VarCurr)).
% 297.44/295.64  all VarCurr (v5464(VarCurr)<->v5456(VarCurr)&v5165(VarCurr,bitIndex25)).
% 297.44/295.64  all VarCurr (v5115(VarCurr)<->v5116(VarCurr)&v5453(VarCurr)).
% 297.44/295.64  all VarCurr (v5453(VarCurr)<->v5454(VarCurr)&v5463(VarCurr)).
% 297.44/295.64  all VarCurr (v5463(VarCurr)<->v5456(VarCurr)|v5165(VarCurr,bitIndex25)).
% 297.44/295.64  all VarCurr (v5454(VarCurr)<->v5455(VarCurr)|v5462(VarCurr)).
% 297.44/295.64  all VarCurr (-v5462(VarCurr)<->v5165(VarCurr,bitIndex25)).
% 297.44/295.64  all VarCurr (-v5455(VarCurr)<->v5456(VarCurr)).
% 297.44/295.64  all VarCurr (v5456(VarCurr)<->v5457(VarCurr)&v5460(VarCurr)).
% 297.44/295.64  all VarCurr (v5460(VarCurr)<->v5459(VarCurr)|v5461(VarCurr)).
% 297.44/295.64  all VarCurr (-v5461(VarCurr)<->v5094(VarCurr,bitIndex25)).
% 297.44/295.65  all VarCurr (v5457(VarCurr)<->v5458(VarCurr)|v5094(VarCurr,bitIndex25)).
% 297.44/295.65  all VarCurr (-v5458(VarCurr)<->v5459(VarCurr)).
% 297.44/295.65  all VarCurr (v5459(VarCurr)<->v5447(VarCurr)&v5449(VarCurr)).
% 297.44/295.65  all VarCurr (v5116(VarCurr)<->v5117(VarCurr)|v5452(VarCurr)).
% 297.44/295.65  all VarCurr (v5452(VarCurr)<->v5444(VarCurr)&v5165(VarCurr,bitIndex24)).
% 297.44/295.65  all VarCurr (v5117(VarCurr)<->v5118(VarCurr)&v5441(VarCurr)).
% 297.44/295.65  all VarCurr (v5441(VarCurr)<->v5442(VarCurr)&v5451(VarCurr)).
% 297.44/295.65  all VarCurr (v5451(VarCurr)<->v5444(VarCurr)|v5165(VarCurr,bitIndex24)).
% 297.44/295.65  all VarCurr (v5442(VarCurr)<->v5443(VarCurr)|v5450(VarCurr)).
% 297.44/295.65  all VarCurr (-v5450(VarCurr)<->v5165(VarCurr,bitIndex24)).
% 297.44/295.65  all VarCurr (-v5443(VarCurr)<->v5444(VarCurr)).
% 297.44/295.65  all VarCurr (v5444(VarCurr)<->v5445(VarCurr)&v5448(VarCurr)).
% 297.44/295.65  all VarCurr (v5448(VarCurr)<->v5447(VarCurr)|v5449(VarCurr)).
% 297.44/295.65  all VarCurr (-v5449(VarCurr)<->v5094(VarCurr,bitIndex24)).
% 297.44/295.65  all VarCurr (v5445(VarCurr)<->v5446(VarCurr)|v5094(VarCurr,bitIndex24)).
% 297.44/295.65  all VarCurr (-v5446(VarCurr)<->v5447(VarCurr)).
% 297.44/295.65  all VarCurr (v5447(VarCurr)<->v5435(VarCurr)&v5437(VarCurr)).
% 297.44/295.65  all VarCurr (v5118(VarCurr)<->v5119(VarCurr)|v5440(VarCurr)).
% 297.44/295.65  all VarCurr (v5440(VarCurr)<->v5432(VarCurr)&v5165(VarCurr,bitIndex23)).
% 297.44/295.65  all VarCurr (v5119(VarCurr)<->v5120(VarCurr)&v5429(VarCurr)).
% 297.44/295.65  all VarCurr (v5429(VarCurr)<->v5430(VarCurr)&v5439(VarCurr)).
% 297.44/295.65  all VarCurr (v5439(VarCurr)<->v5432(VarCurr)|v5165(VarCurr,bitIndex23)).
% 297.44/295.65  all VarCurr (v5430(VarCurr)<->v5431(VarCurr)|v5438(VarCurr)).
% 297.44/295.65  all VarCurr (-v5438(VarCurr)<->v5165(VarCurr,bitIndex23)).
% 297.44/295.65  all VarCurr (-v5431(VarCurr)<->v5432(VarCurr)).
% 297.44/295.65  all VarCurr (v5432(VarCurr)<->v5433(VarCurr)&v5436(VarCurr)).
% 297.44/295.65  all VarCurr (v5436(VarCurr)<->v5435(VarCurr)|v5437(VarCurr)).
% 297.44/295.65  all VarCurr (-v5437(VarCurr)<->v5094(VarCurr,bitIndex23)).
% 297.44/295.65  all VarCurr (v5433(VarCurr)<->v5434(VarCurr)|v5094(VarCurr,bitIndex23)).
% 297.44/295.65  all VarCurr (-v5434(VarCurr)<->v5435(VarCurr)).
% 297.44/295.65  all VarCurr (v5435(VarCurr)<->v5423(VarCurr)&v5425(VarCurr)).
% 297.44/295.65  all VarCurr (v5120(VarCurr)<->v5121(VarCurr)|v5428(VarCurr)).
% 297.44/295.65  all VarCurr (v5428(VarCurr)<->v5420(VarCurr)&v5165(VarCurr,bitIndex22)).
% 297.44/295.65  all VarCurr (v5121(VarCurr)<->v5122(VarCurr)&v5417(VarCurr)).
% 297.44/295.65  all VarCurr (v5417(VarCurr)<->v5418(VarCurr)&v5427(VarCurr)).
% 297.44/295.65  all VarCurr (v5427(VarCurr)<->v5420(VarCurr)|v5165(VarCurr,bitIndex22)).
% 297.44/295.65  all VarCurr (v5418(VarCurr)<->v5419(VarCurr)|v5426(VarCurr)).
% 297.44/295.65  all VarCurr (-v5426(VarCurr)<->v5165(VarCurr,bitIndex22)).
% 297.44/295.65  all VarCurr (-v5419(VarCurr)<->v5420(VarCurr)).
% 297.44/295.65  all VarCurr (v5420(VarCurr)<->v5421(VarCurr)&v5424(VarCurr)).
% 297.44/295.65  all VarCurr (v5424(VarCurr)<->v5423(VarCurr)|v5425(VarCurr)).
% 297.44/295.65  all VarCurr (-v5425(VarCurr)<->v5094(VarCurr,bitIndex22)).
% 297.44/295.65  all VarCurr (v5421(VarCurr)<->v5422(VarCurr)|v5094(VarCurr,bitIndex22)).
% 297.44/295.65  all VarCurr (-v5422(VarCurr)<->v5423(VarCurr)).
% 297.44/295.65  all VarCurr (v5423(VarCurr)<->v5411(VarCurr)&v5413(VarCurr)).
% 297.44/295.65  all VarCurr (v5122(VarCurr)<->v5123(VarCurr)|v5416(VarCurr)).
% 297.44/295.65  all VarCurr (v5416(VarCurr)<->v5408(VarCurr)&v5165(VarCurr,bitIndex21)).
% 297.44/295.65  all VarCurr (v5123(VarCurr)<->v5124(VarCurr)&v5405(VarCurr)).
% 297.44/295.65  all VarCurr (v5405(VarCurr)<->v5406(VarCurr)&v5415(VarCurr)).
% 297.44/295.65  all VarCurr (v5415(VarCurr)<->v5408(VarCurr)|v5165(VarCurr,bitIndex21)).
% 297.44/295.65  all VarCurr (v5406(VarCurr)<->v5407(VarCurr)|v5414(VarCurr)).
% 297.44/295.65  all VarCurr (-v5414(VarCurr)<->v5165(VarCurr,bitIndex21)).
% 297.44/295.65  all VarCurr (-v5407(VarCurr)<->v5408(VarCurr)).
% 297.44/295.65  all VarCurr (v5408(VarCurr)<->v5409(VarCurr)&v5412(VarCurr)).
% 297.44/295.65  all VarCurr (v5412(VarCurr)<->v5411(VarCurr)|v5413(VarCurr)).
% 297.44/295.65  all VarCurr (-v5413(VarCurr)<->v5094(VarCurr,bitIndex21)).
% 297.44/295.65  all VarCurr (v5409(VarCurr)<->v5410(VarCurr)|v5094(VarCurr,bitIndex21)).
% 297.44/295.65  all VarCurr (-v5410(VarCurr)<->v5411(VarCurr)).
% 297.44/295.65  all VarCurr (v5411(VarCurr)<->v5399(VarCurr)&v5401(VarCurr)).
% 297.44/295.65  all VarCurr (v5124(VarCurr)<->v5125(VarCurr)|v5404(VarCurr)).
% 297.44/295.65  all VarCurr (v5404(VarCurr)<->v5396(VarCurr)&v5165(VarCurr,bitIndex20)).
% 297.44/295.65  all VarCurr (v5125(VarCurr)<->v5126(VarCurr)&v5393(VarCurr)).
% 297.44/295.65  all VarCurr (v5393(VarCurr)<->v5394(VarCurr)&v5403(VarCurr)).
% 297.44/295.65  all VarCurr (v5403(VarCurr)<->v5396(VarCurr)|v5165(VarCurr,bitIndex20)).
% 297.44/295.65  all VarCurr (v5394(VarCurr)<->v5395(VarCurr)|v5402(VarCurr)).
% 297.44/295.65  all VarCurr (-v5402(VarCurr)<->v5165(VarCurr,bitIndex20)).
% 297.46/295.66  all VarCurr (-v5395(VarCurr)<->v5396(VarCurr)).
% 297.46/295.66  all VarCurr (v5396(VarCurr)<->v5397(VarCurr)&v5400(VarCurr)).
% 297.46/295.66  all VarCurr (v5400(VarCurr)<->v5399(VarCurr)|v5401(VarCurr)).
% 297.46/295.66  all VarCurr (-v5401(VarCurr)<->v5094(VarCurr,bitIndex20)).
% 297.46/295.66  all VarCurr (v5397(VarCurr)<->v5398(VarCurr)|v5094(VarCurr,bitIndex20)).
% 297.46/295.66  all VarCurr (-v5398(VarCurr)<->v5399(VarCurr)).
% 297.46/295.66  all VarCurr (v5399(VarCurr)<->v5387(VarCurr)&v5389(VarCurr)).
% 297.46/295.66  all VarCurr (v5126(VarCurr)<->v5127(VarCurr)|v5392(VarCurr)).
% 297.46/295.66  all VarCurr (v5392(VarCurr)<->v5384(VarCurr)&v5165(VarCurr,bitIndex19)).
% 297.46/295.66  all VarCurr (v5127(VarCurr)<->v5128(VarCurr)&v5381(VarCurr)).
% 297.46/295.66  all VarCurr (v5381(VarCurr)<->v5382(VarCurr)&v5391(VarCurr)).
% 297.46/295.66  all VarCurr (v5391(VarCurr)<->v5384(VarCurr)|v5165(VarCurr,bitIndex19)).
% 297.46/295.66  all VarCurr (v5382(VarCurr)<->v5383(VarCurr)|v5390(VarCurr)).
% 297.46/295.66  all VarCurr (-v5390(VarCurr)<->v5165(VarCurr,bitIndex19)).
% 297.46/295.66  all VarCurr (-v5383(VarCurr)<->v5384(VarCurr)).
% 297.46/295.66  all VarCurr (v5384(VarCurr)<->v5385(VarCurr)&v5388(VarCurr)).
% 297.46/295.66  all VarCurr (v5388(VarCurr)<->v5387(VarCurr)|v5389(VarCurr)).
% 297.46/295.66  all VarCurr (-v5389(VarCurr)<->v5094(VarCurr,bitIndex19)).
% 297.46/295.66  all VarCurr (v5385(VarCurr)<->v5386(VarCurr)|v5094(VarCurr,bitIndex19)).
% 297.46/295.66  all VarCurr (-v5386(VarCurr)<->v5387(VarCurr)).
% 297.46/295.66  all VarCurr (v5387(VarCurr)<->v5375(VarCurr)&v5377(VarCurr)).
% 297.46/295.66  all VarCurr (v5128(VarCurr)<->v5129(VarCurr)|v5380(VarCurr)).
% 297.46/295.66  all VarCurr (v5380(VarCurr)<->v5372(VarCurr)&v5165(VarCurr,bitIndex18)).
% 297.46/295.66  all VarCurr (v5129(VarCurr)<->v5130(VarCurr)&v5369(VarCurr)).
% 297.46/295.66  all VarCurr (v5369(VarCurr)<->v5370(VarCurr)&v5379(VarCurr)).
% 297.46/295.66  all VarCurr (v5379(VarCurr)<->v5372(VarCurr)|v5165(VarCurr,bitIndex18)).
% 297.46/295.66  all VarCurr (v5370(VarCurr)<->v5371(VarCurr)|v5378(VarCurr)).
% 297.46/295.66  all VarCurr (-v5378(VarCurr)<->v5165(VarCurr,bitIndex18)).
% 297.46/295.66  all VarCurr (-v5371(VarCurr)<->v5372(VarCurr)).
% 297.46/295.66  all VarCurr (v5372(VarCurr)<->v5373(VarCurr)&v5376(VarCurr)).
% 297.46/295.66  all VarCurr (v5376(VarCurr)<->v5375(VarCurr)|v5377(VarCurr)).
% 297.46/295.66  all VarCurr (-v5377(VarCurr)<->v5094(VarCurr,bitIndex18)).
% 297.46/295.66  all VarCurr (v5373(VarCurr)<->v5374(VarCurr)|v5094(VarCurr,bitIndex18)).
% 297.46/295.66  all VarCurr (-v5374(VarCurr)<->v5375(VarCurr)).
% 297.46/295.66  all VarCurr (v5375(VarCurr)<->v5363(VarCurr)&v5365(VarCurr)).
% 297.46/295.66  all VarCurr (v5130(VarCurr)<->v5131(VarCurr)|v5368(VarCurr)).
% 297.46/295.66  all VarCurr (v5368(VarCurr)<->v5360(VarCurr)&v5165(VarCurr,bitIndex17)).
% 297.46/295.66  all VarCurr (v5131(VarCurr)<->v5132(VarCurr)&v5357(VarCurr)).
% 297.46/295.66  all VarCurr (v5357(VarCurr)<->v5358(VarCurr)&v5367(VarCurr)).
% 297.46/295.66  all VarCurr (v5367(VarCurr)<->v5360(VarCurr)|v5165(VarCurr,bitIndex17)).
% 297.46/295.66  all VarCurr (v5358(VarCurr)<->v5359(VarCurr)|v5366(VarCurr)).
% 297.46/295.66  all VarCurr (-v5366(VarCurr)<->v5165(VarCurr,bitIndex17)).
% 297.46/295.66  all VarCurr (-v5359(VarCurr)<->v5360(VarCurr)).
% 297.46/295.66  all VarCurr (v5360(VarCurr)<->v5361(VarCurr)&v5364(VarCurr)).
% 297.46/295.66  all VarCurr (v5364(VarCurr)<->v5363(VarCurr)|v5365(VarCurr)).
% 297.46/295.66  all VarCurr (-v5365(VarCurr)<->v5094(VarCurr,bitIndex17)).
% 297.46/295.66  all VarCurr (v5361(VarCurr)<->v5362(VarCurr)|v5094(VarCurr,bitIndex17)).
% 297.46/295.66  all VarCurr (-v5362(VarCurr)<->v5363(VarCurr)).
% 297.46/295.66  all VarCurr (v5363(VarCurr)<->v5351(VarCurr)&v5353(VarCurr)).
% 297.46/295.66  all VarCurr (v5132(VarCurr)<->v5133(VarCurr)|v5356(VarCurr)).
% 297.46/295.66  all VarCurr (v5356(VarCurr)<->v5348(VarCurr)&v5165(VarCurr,bitIndex16)).
% 297.46/295.66  all VarCurr (v5133(VarCurr)<->v5134(VarCurr)&v5345(VarCurr)).
% 297.46/295.66  all VarCurr (v5345(VarCurr)<->v5346(VarCurr)&v5355(VarCurr)).
% 297.46/295.66  all VarCurr (v5355(VarCurr)<->v5348(VarCurr)|v5165(VarCurr,bitIndex16)).
% 297.46/295.66  all VarCurr (v5346(VarCurr)<->v5347(VarCurr)|v5354(VarCurr)).
% 297.46/295.66  all VarCurr (-v5354(VarCurr)<->v5165(VarCurr,bitIndex16)).
% 297.46/295.66  all VarCurr (-v5347(VarCurr)<->v5348(VarCurr)).
% 297.46/295.66  all VarCurr (v5348(VarCurr)<->v5349(VarCurr)&v5352(VarCurr)).
% 297.46/295.66  all VarCurr (v5352(VarCurr)<->v5351(VarCurr)|v5353(VarCurr)).
% 297.46/295.66  all VarCurr (-v5353(VarCurr)<->v5094(VarCurr,bitIndex16)).
% 297.46/295.66  all VarCurr (v5349(VarCurr)<->v5350(VarCurr)|v5094(VarCurr,bitIndex16)).
% 297.46/295.66  all VarCurr (-v5350(VarCurr)<->v5351(VarCurr)).
% 297.46/295.66  all VarCurr (v5351(VarCurr)<->v5339(VarCurr)&v5341(VarCurr)).
% 297.46/295.66  all VarCurr (v5134(VarCurr)<->v5135(VarCurr)|v5344(VarCurr)).
% 297.46/295.66  all VarCurr (v5344(VarCurr)<->v5336(VarCurr)&v5165(VarCurr,bitIndex15)).
% 297.46/295.66  all VarCurr (v5135(VarCurr)<->v5136(VarCurr)&v5333(VarCurr)).
% 297.46/295.67  all VarCurr (v5333(VarCurr)<->v5334(VarCurr)&v5343(VarCurr)).
% 297.46/295.67  all VarCurr (v5343(VarCurr)<->v5336(VarCurr)|v5165(VarCurr,bitIndex15)).
% 297.46/295.67  all VarCurr (v5334(VarCurr)<->v5335(VarCurr)|v5342(VarCurr)).
% 297.46/295.67  all VarCurr (-v5342(VarCurr)<->v5165(VarCurr,bitIndex15)).
% 297.46/295.67  all VarCurr (-v5335(VarCurr)<->v5336(VarCurr)).
% 297.46/295.67  all VarCurr (v5336(VarCurr)<->v5337(VarCurr)&v5340(VarCurr)).
% 297.46/295.67  all VarCurr (v5340(VarCurr)<->v5339(VarCurr)|v5341(VarCurr)).
% 297.46/295.67  all VarCurr (-v5341(VarCurr)<->v5094(VarCurr,bitIndex15)).
% 297.46/295.67  all VarCurr (v5337(VarCurr)<->v5338(VarCurr)|v5094(VarCurr,bitIndex15)).
% 297.46/295.67  all VarCurr (-v5338(VarCurr)<->v5339(VarCurr)).
% 297.46/295.67  all VarCurr (v5339(VarCurr)<->v5327(VarCurr)&v5329(VarCurr)).
% 297.46/295.67  all VarCurr (v5136(VarCurr)<->v5137(VarCurr)|v5332(VarCurr)).
% 297.46/295.67  all VarCurr (v5332(VarCurr)<->v5324(VarCurr)&v5165(VarCurr,bitIndex14)).
% 297.46/295.67  all VarCurr (v5137(VarCurr)<->v5138(VarCurr)&v5321(VarCurr)).
% 297.46/295.67  all VarCurr (v5321(VarCurr)<->v5322(VarCurr)&v5331(VarCurr)).
% 297.46/295.67  all VarCurr (v5331(VarCurr)<->v5324(VarCurr)|v5165(VarCurr,bitIndex14)).
% 297.46/295.67  all VarCurr (v5322(VarCurr)<->v5323(VarCurr)|v5330(VarCurr)).
% 297.46/295.67  all VarCurr (-v5330(VarCurr)<->v5165(VarCurr,bitIndex14)).
% 297.46/295.67  all VarCurr (-v5323(VarCurr)<->v5324(VarCurr)).
% 297.46/295.67  all VarCurr (v5324(VarCurr)<->v5325(VarCurr)&v5328(VarCurr)).
% 297.46/295.67  all VarCurr (v5328(VarCurr)<->v5327(VarCurr)|v5329(VarCurr)).
% 297.46/295.67  all VarCurr (-v5329(VarCurr)<->v5094(VarCurr,bitIndex14)).
% 297.46/295.67  all VarCurr (v5325(VarCurr)<->v5326(VarCurr)|v5094(VarCurr,bitIndex14)).
% 297.46/295.67  all VarCurr (-v5326(VarCurr)<->v5327(VarCurr)).
% 297.46/295.67  all VarCurr (v5327(VarCurr)<->v5315(VarCurr)&v5317(VarCurr)).
% 297.46/295.67  all VarCurr (v5138(VarCurr)<->v5139(VarCurr)|v5320(VarCurr)).
% 297.46/295.67  all VarCurr (v5320(VarCurr)<->v5312(VarCurr)&v5165(VarCurr,bitIndex13)).
% 297.46/295.67  all VarCurr (v5139(VarCurr)<->v5140(VarCurr)&v5309(VarCurr)).
% 297.46/295.67  all VarCurr (v5309(VarCurr)<->v5310(VarCurr)&v5319(VarCurr)).
% 297.46/295.67  all VarCurr (v5319(VarCurr)<->v5312(VarCurr)|v5165(VarCurr,bitIndex13)).
% 297.46/295.67  all VarCurr (v5310(VarCurr)<->v5311(VarCurr)|v5318(VarCurr)).
% 297.46/295.67  all VarCurr (-v5318(VarCurr)<->v5165(VarCurr,bitIndex13)).
% 297.46/295.67  all VarCurr (-v5311(VarCurr)<->v5312(VarCurr)).
% 297.46/295.67  all VarCurr (v5312(VarCurr)<->v5313(VarCurr)&v5316(VarCurr)).
% 297.46/295.67  all VarCurr (v5316(VarCurr)<->v5315(VarCurr)|v5317(VarCurr)).
% 297.46/295.67  all VarCurr (-v5317(VarCurr)<->v5094(VarCurr,bitIndex13)).
% 297.46/295.67  all VarCurr (v5313(VarCurr)<->v5314(VarCurr)|v5094(VarCurr,bitIndex13)).
% 297.46/295.67  all VarCurr (-v5314(VarCurr)<->v5315(VarCurr)).
% 297.46/295.67  all VarCurr (v5315(VarCurr)<->v5303(VarCurr)&v5305(VarCurr)).
% 297.46/295.67  all VarCurr (v5140(VarCurr)<->v5141(VarCurr)|v5308(VarCurr)).
% 297.46/295.67  all VarCurr (v5308(VarCurr)<->v5300(VarCurr)&v5165(VarCurr,bitIndex12)).
% 297.46/295.67  all VarCurr (v5141(VarCurr)<->v5142(VarCurr)&v5297(VarCurr)).
% 297.46/295.67  all VarCurr (v5297(VarCurr)<->v5298(VarCurr)&v5307(VarCurr)).
% 297.46/295.67  all VarCurr (v5307(VarCurr)<->v5300(VarCurr)|v5165(VarCurr,bitIndex12)).
% 297.46/295.67  all VarCurr (v5298(VarCurr)<->v5299(VarCurr)|v5306(VarCurr)).
% 297.46/295.67  all VarCurr (-v5306(VarCurr)<->v5165(VarCurr,bitIndex12)).
% 297.46/295.67  all VarCurr (-v5299(VarCurr)<->v5300(VarCurr)).
% 297.46/295.67  all VarCurr (v5300(VarCurr)<->v5301(VarCurr)&v5304(VarCurr)).
% 297.46/295.67  all VarCurr (v5304(VarCurr)<->v5303(VarCurr)|v5305(VarCurr)).
% 297.46/295.67  all VarCurr (-v5305(VarCurr)<->v5094(VarCurr,bitIndex12)).
% 297.46/295.67  all VarCurr (v5301(VarCurr)<->v5302(VarCurr)|v5094(VarCurr,bitIndex12)).
% 297.46/295.67  all VarCurr (-v5302(VarCurr)<->v5303(VarCurr)).
% 297.46/295.67  all VarCurr (v5303(VarCurr)<->v5291(VarCurr)&v5293(VarCurr)).
% 297.46/295.67  all VarCurr (v5142(VarCurr)<->v5143(VarCurr)|v5296(VarCurr)).
% 297.46/295.67  all VarCurr (v5296(VarCurr)<->v5288(VarCurr)&v5165(VarCurr,bitIndex11)).
% 297.46/295.67  all VarCurr (v5143(VarCurr)<->v5144(VarCurr)&v5285(VarCurr)).
% 297.46/295.67  all VarCurr (v5285(VarCurr)<->v5286(VarCurr)&v5295(VarCurr)).
% 297.46/295.67  all VarCurr (v5295(VarCurr)<->v5288(VarCurr)|v5165(VarCurr,bitIndex11)).
% 297.46/295.67  all VarCurr (v5286(VarCurr)<->v5287(VarCurr)|v5294(VarCurr)).
% 297.46/295.67  all VarCurr (-v5294(VarCurr)<->v5165(VarCurr,bitIndex11)).
% 297.46/295.67  all VarCurr (-v5287(VarCurr)<->v5288(VarCurr)).
% 297.46/295.67  all VarCurr (v5288(VarCurr)<->v5289(VarCurr)&v5292(VarCurr)).
% 297.46/295.67  all VarCurr (v5292(VarCurr)<->v5291(VarCurr)|v5293(VarCurr)).
% 297.46/295.67  all VarCurr (-v5293(VarCurr)<->v5094(VarCurr,bitIndex11)).
% 297.46/295.67  all VarCurr (v5289(VarCurr)<->v5290(VarCurr)|v5094(VarCurr,bitIndex11)).
% 297.46/295.67  all VarCurr (-v5290(VarCurr)<->v5291(VarCurr)).
% 297.46/295.68  all VarCurr (v5291(VarCurr)<->v5279(VarCurr)&v5281(VarCurr)).
% 297.46/295.68  all VarCurr (v5144(VarCurr)<->v5145(VarCurr)|v5284(VarCurr)).
% 297.46/295.68  all VarCurr (v5284(VarCurr)<->v5276(VarCurr)&v5165(VarCurr,bitIndex10)).
% 297.46/295.68  all VarCurr (v5145(VarCurr)<->v5146(VarCurr)&v5273(VarCurr)).
% 297.46/295.68  all VarCurr (v5273(VarCurr)<->v5274(VarCurr)&v5283(VarCurr)).
% 297.46/295.68  all VarCurr (v5283(VarCurr)<->v5276(VarCurr)|v5165(VarCurr,bitIndex10)).
% 297.46/295.68  all VarCurr (v5274(VarCurr)<->v5275(VarCurr)|v5282(VarCurr)).
% 297.46/295.68  all VarCurr (-v5282(VarCurr)<->v5165(VarCurr,bitIndex10)).
% 297.46/295.68  all VarCurr (-v5275(VarCurr)<->v5276(VarCurr)).
% 297.46/295.68  all VarCurr (v5276(VarCurr)<->v5277(VarCurr)&v5280(VarCurr)).
% 297.46/295.68  all VarCurr (v5280(VarCurr)<->v5279(VarCurr)|v5281(VarCurr)).
% 297.46/295.68  all VarCurr (-v5281(VarCurr)<->v5094(VarCurr,bitIndex10)).
% 297.46/295.68  all VarCurr (v5277(VarCurr)<->v5278(VarCurr)|v5094(VarCurr,bitIndex10)).
% 297.46/295.68  all VarCurr (-v5278(VarCurr)<->v5279(VarCurr)).
% 297.46/295.68  all VarCurr (v5279(VarCurr)<->v5267(VarCurr)&v5269(VarCurr)).
% 297.46/295.68  all VarCurr (v5146(VarCurr)<->v5147(VarCurr)|v5272(VarCurr)).
% 297.46/295.68  all VarCurr (v5272(VarCurr)<->v5264(VarCurr)&v5165(VarCurr,bitIndex9)).
% 297.46/295.68  all VarCurr (v5147(VarCurr)<->v5148(VarCurr)&v5261(VarCurr)).
% 297.46/295.68  all VarCurr (v5261(VarCurr)<->v5262(VarCurr)&v5271(VarCurr)).
% 297.46/295.68  all VarCurr (v5271(VarCurr)<->v5264(VarCurr)|v5165(VarCurr,bitIndex9)).
% 297.46/295.68  all VarCurr (v5262(VarCurr)<->v5263(VarCurr)|v5270(VarCurr)).
% 297.46/295.68  all VarCurr (-v5270(VarCurr)<->v5165(VarCurr,bitIndex9)).
% 297.46/295.68  all VarCurr (-v5263(VarCurr)<->v5264(VarCurr)).
% 297.46/295.68  all VarCurr (v5264(VarCurr)<->v5265(VarCurr)&v5268(VarCurr)).
% 297.46/295.68  all VarCurr (v5268(VarCurr)<->v5267(VarCurr)|v5269(VarCurr)).
% 297.46/295.68  all VarCurr (-v5269(VarCurr)<->v5094(VarCurr,bitIndex9)).
% 297.46/295.68  all VarCurr (v5265(VarCurr)<->v5266(VarCurr)|v5094(VarCurr,bitIndex9)).
% 297.46/295.68  all VarCurr (-v5266(VarCurr)<->v5267(VarCurr)).
% 297.46/295.68  all VarCurr (v5267(VarCurr)<->v5255(VarCurr)&v5257(VarCurr)).
% 297.46/295.68  all VarCurr (v5148(VarCurr)<->v5149(VarCurr)|v5260(VarCurr)).
% 297.46/295.68  all VarCurr (v5260(VarCurr)<->v5252(VarCurr)&v5165(VarCurr,bitIndex8)).
% 297.46/295.68  all VarCurr (v5149(VarCurr)<->v5150(VarCurr)&v5249(VarCurr)).
% 297.46/295.68  all VarCurr (v5249(VarCurr)<->v5250(VarCurr)&v5259(VarCurr)).
% 297.46/295.68  all VarCurr (v5259(VarCurr)<->v5252(VarCurr)|v5165(VarCurr,bitIndex8)).
% 297.46/295.68  all VarCurr (v5250(VarCurr)<->v5251(VarCurr)|v5258(VarCurr)).
% 297.46/295.68  all VarCurr (-v5258(VarCurr)<->v5165(VarCurr,bitIndex8)).
% 297.46/295.68  all VarCurr (-v5251(VarCurr)<->v5252(VarCurr)).
% 297.46/295.68  all VarCurr (v5252(VarCurr)<->v5253(VarCurr)&v5256(VarCurr)).
% 297.46/295.68  all VarCurr (v5256(VarCurr)<->v5255(VarCurr)|v5257(VarCurr)).
% 297.46/295.68  all VarCurr (-v5257(VarCurr)<->v5094(VarCurr,bitIndex8)).
% 297.46/295.68  all VarCurr (v5253(VarCurr)<->v5254(VarCurr)|v5094(VarCurr,bitIndex8)).
% 297.46/295.68  all VarCurr (-v5254(VarCurr)<->v5255(VarCurr)).
% 297.46/295.68  all VarCurr (v5255(VarCurr)<->v5243(VarCurr)&v5245(VarCurr)).
% 297.46/295.68  all VarCurr (v5150(VarCurr)<->v5151(VarCurr)|v5248(VarCurr)).
% 297.46/295.68  all VarCurr (v5248(VarCurr)<->v5240(VarCurr)&v5165(VarCurr,bitIndex7)).
% 297.46/295.68  all VarCurr (v5151(VarCurr)<->v5152(VarCurr)&v5237(VarCurr)).
% 297.46/295.68  all VarCurr (v5237(VarCurr)<->v5238(VarCurr)&v5247(VarCurr)).
% 297.46/295.68  all VarCurr (v5247(VarCurr)<->v5240(VarCurr)|v5165(VarCurr,bitIndex7)).
% 297.46/295.68  all VarCurr (v5238(VarCurr)<->v5239(VarCurr)|v5246(VarCurr)).
% 297.46/295.68  all VarCurr (-v5246(VarCurr)<->v5165(VarCurr,bitIndex7)).
% 297.46/295.68  all VarCurr (-v5239(VarCurr)<->v5240(VarCurr)).
% 297.46/295.68  all VarCurr (v5240(VarCurr)<->v5241(VarCurr)&v5244(VarCurr)).
% 297.46/295.68  all VarCurr (v5244(VarCurr)<->v5243(VarCurr)|v5245(VarCurr)).
% 297.46/295.68  all VarCurr (-v5245(VarCurr)<->v5094(VarCurr,bitIndex7)).
% 297.46/295.68  all VarCurr (v5241(VarCurr)<->v5242(VarCurr)|v5094(VarCurr,bitIndex7)).
% 297.46/295.68  all VarCurr (-v5242(VarCurr)<->v5243(VarCurr)).
% 297.46/295.68  all VarCurr (v5243(VarCurr)<->v5231(VarCurr)&v5233(VarCurr)).
% 297.46/295.68  all VarCurr (v5152(VarCurr)<->v5153(VarCurr)|v5236(VarCurr)).
% 297.46/295.68  all VarCurr (v5236(VarCurr)<->v5228(VarCurr)&v5165(VarCurr,bitIndex6)).
% 297.46/295.68  all VarCurr (v5153(VarCurr)<->v5154(VarCurr)&v5225(VarCurr)).
% 297.46/295.68  all VarCurr (v5225(VarCurr)<->v5226(VarCurr)&v5235(VarCurr)).
% 297.46/295.68  all VarCurr (v5235(VarCurr)<->v5228(VarCurr)|v5165(VarCurr,bitIndex6)).
% 297.46/295.68  all VarCurr (v5226(VarCurr)<->v5227(VarCurr)|v5234(VarCurr)).
% 297.46/295.68  all VarCurr (-v5234(VarCurr)<->v5165(VarCurr,bitIndex6)).
% 297.46/295.68  all VarCurr (-v5227(VarCurr)<->v5228(VarCurr)).
% 297.46/295.68  all VarCurr (v5228(VarCurr)<->v5229(VarCurr)&v5232(VarCurr)).
% 297.46/295.68  all VarCurr (v5232(VarCurr)<->v5231(VarCurr)|v5233(VarCurr)).
% 297.46/295.69  all VarCurr (-v5233(VarCurr)<->v5094(VarCurr,bitIndex6)).
% 297.46/295.69  all VarCurr (v5229(VarCurr)<->v5230(VarCurr)|v5094(VarCurr,bitIndex6)).
% 297.46/295.69  all VarCurr (-v5230(VarCurr)<->v5231(VarCurr)).
% 297.46/295.69  all VarCurr (v5231(VarCurr)<->v5219(VarCurr)&v5221(VarCurr)).
% 297.46/295.69  all VarCurr (v5154(VarCurr)<->v5155(VarCurr)|v5224(VarCurr)).
% 297.46/295.69  all VarCurr (v5224(VarCurr)<->v5216(VarCurr)&v5165(VarCurr,bitIndex5)).
% 297.46/295.69  all VarCurr (v5155(VarCurr)<->v5156(VarCurr)&v5213(VarCurr)).
% 297.46/295.69  all VarCurr (v5213(VarCurr)<->v5214(VarCurr)&v5223(VarCurr)).
% 297.46/295.69  all VarCurr (v5223(VarCurr)<->v5216(VarCurr)|v5165(VarCurr,bitIndex5)).
% 297.46/295.69  all VarCurr (v5214(VarCurr)<->v5215(VarCurr)|v5222(VarCurr)).
% 297.46/295.69  all VarCurr (-v5222(VarCurr)<->v5165(VarCurr,bitIndex5)).
% 297.46/295.69  all VarCurr (-v5215(VarCurr)<->v5216(VarCurr)).
% 297.46/295.69  all VarCurr (v5216(VarCurr)<->v5217(VarCurr)&v5220(VarCurr)).
% 297.46/295.69  all VarCurr (v5220(VarCurr)<->v5219(VarCurr)|v5221(VarCurr)).
% 297.46/295.69  all VarCurr (-v5221(VarCurr)<->v5094(VarCurr,bitIndex5)).
% 297.46/295.69  all VarCurr (v5217(VarCurr)<->v5218(VarCurr)|v5094(VarCurr,bitIndex5)).
% 297.46/295.69  all VarCurr (-v5218(VarCurr)<->v5219(VarCurr)).
% 297.46/295.69  all VarCurr (v5219(VarCurr)<->v5207(VarCurr)&v5209(VarCurr)).
% 297.46/295.69  all VarCurr (v5156(VarCurr)<->v5157(VarCurr)|v5212(VarCurr)).
% 297.46/295.69  all VarCurr (v5212(VarCurr)<->v5204(VarCurr)&v5165(VarCurr,bitIndex4)).
% 297.46/295.69  all VarCurr (v5157(VarCurr)<->v5158(VarCurr)&v5201(VarCurr)).
% 297.46/295.69  all VarCurr (v5201(VarCurr)<->v5202(VarCurr)&v5211(VarCurr)).
% 297.46/295.69  all VarCurr (v5211(VarCurr)<->v5204(VarCurr)|v5165(VarCurr,bitIndex4)).
% 297.46/295.69  all VarCurr (v5202(VarCurr)<->v5203(VarCurr)|v5210(VarCurr)).
% 297.46/295.69  all VarCurr (-v5210(VarCurr)<->v5165(VarCurr,bitIndex4)).
% 297.46/295.69  all VarCurr (-v5203(VarCurr)<->v5204(VarCurr)).
% 297.46/295.69  all VarCurr (v5204(VarCurr)<->v5205(VarCurr)&v5208(VarCurr)).
% 297.46/295.69  all VarCurr (v5208(VarCurr)<->v5207(VarCurr)|v5209(VarCurr)).
% 297.46/295.69  all VarCurr (-v5209(VarCurr)<->v5094(VarCurr,bitIndex4)).
% 297.46/295.69  all VarCurr (v5205(VarCurr)<->v5206(VarCurr)|v5094(VarCurr,bitIndex4)).
% 297.46/295.69  all VarCurr (-v5206(VarCurr)<->v5207(VarCurr)).
% 297.46/295.69  all VarCurr (v5207(VarCurr)<->v5195(VarCurr)&v5197(VarCurr)).
% 297.46/295.69  all VarCurr (v5158(VarCurr)<->v5159(VarCurr)|v5200(VarCurr)).
% 297.46/295.69  all VarCurr (v5200(VarCurr)<->v5192(VarCurr)&v5165(VarCurr,bitIndex3)).
% 297.46/295.69  all VarCurr (v5159(VarCurr)<->v5160(VarCurr)&v5189(VarCurr)).
% 297.46/295.69  all VarCurr (v5189(VarCurr)<->v5190(VarCurr)&v5199(VarCurr)).
% 297.46/295.69  all VarCurr (v5199(VarCurr)<->v5192(VarCurr)|v5165(VarCurr,bitIndex3)).
% 297.46/295.69  all VarCurr (v5190(VarCurr)<->v5191(VarCurr)|v5198(VarCurr)).
% 297.46/295.69  all VarCurr (-v5198(VarCurr)<->v5165(VarCurr,bitIndex3)).
% 297.46/295.69  all VarCurr (-v5191(VarCurr)<->v5192(VarCurr)).
% 297.46/295.69  all VarCurr (v5192(VarCurr)<->v5193(VarCurr)&v5196(VarCurr)).
% 297.46/295.69  all VarCurr (v5196(VarCurr)<->v5195(VarCurr)|v5197(VarCurr)).
% 297.46/295.69  all VarCurr (-v5197(VarCurr)<->v5094(VarCurr,bitIndex3)).
% 297.46/295.69  all VarCurr (v5193(VarCurr)<->v5194(VarCurr)|v5094(VarCurr,bitIndex3)).
% 297.46/295.69  all VarCurr (-v5194(VarCurr)<->v5195(VarCurr)).
% 297.46/295.69  all VarCurr (v5195(VarCurr)<->v5183(VarCurr)&v5185(VarCurr)).
% 297.46/295.69  all VarCurr (v5160(VarCurr)<->v5161(VarCurr)|v5188(VarCurr)).
% 297.46/295.69  all VarCurr (v5188(VarCurr)<->v5180(VarCurr)&v5165(VarCurr,bitIndex2)).
% 297.46/295.69  all VarCurr (v5161(VarCurr)<->v5162(VarCurr)&v5177(VarCurr)).
% 297.46/295.69  all VarCurr (v5177(VarCurr)<->v5178(VarCurr)&v5187(VarCurr)).
% 297.46/295.69  all VarCurr (v5187(VarCurr)<->v5180(VarCurr)|v5165(VarCurr,bitIndex2)).
% 297.46/295.69  all VarCurr (v5178(VarCurr)<->v5179(VarCurr)|v5186(VarCurr)).
% 297.46/295.69  all VarCurr (-v5186(VarCurr)<->v5165(VarCurr,bitIndex2)).
% 297.46/295.69  all VarCurr (-v5179(VarCurr)<->v5180(VarCurr)).
% 297.46/295.69  all VarCurr (v5180(VarCurr)<->v5181(VarCurr)&v5184(VarCurr)).
% 297.46/295.69  all VarCurr (v5184(VarCurr)<->v5183(VarCurr)|v5185(VarCurr)).
% 297.46/295.69  all VarCurr (-v5185(VarCurr)<->v5094(VarCurr,bitIndex2)).
% 297.46/295.69  all VarCurr (v5181(VarCurr)<->v5182(VarCurr)|v5094(VarCurr,bitIndex2)).
% 297.46/295.69  all VarCurr (-v5182(VarCurr)<->v5183(VarCurr)).
% 297.46/295.69  all VarCurr (v5183(VarCurr)<->v5172(VarCurr)&v5173(VarCurr)).
% 297.46/295.69  all VarCurr (v5162(VarCurr)<->v5163(VarCurr)|v5176(VarCurr)).
% 297.46/295.69  all VarCurr (v5176(VarCurr)<->v5169(VarCurr)&v5165(VarCurr,bitIndex1)).
% 297.46/295.69  all VarCurr (v5163(VarCurr)<->v5164(VarCurr)&v5166(VarCurr)).
% 297.46/295.69  all VarCurr (v5166(VarCurr)<->v5167(VarCurr)&v5175(VarCurr)).
% 297.46/295.69  all VarCurr (v5175(VarCurr)<->v5169(VarCurr)|v5165(VarCurr,bitIndex1)).
% 297.46/295.69  all VarCurr (v5167(VarCurr)<->v5168(VarCurr)|v5174(VarCurr)).
% 297.46/295.70  all VarCurr (-v5174(VarCurr)<->v5165(VarCurr,bitIndex1)).
% 297.46/295.70  all VarCurr (-v5168(VarCurr)<->v5169(VarCurr)).
% 297.46/295.70  all VarCurr (v5169(VarCurr)<->v5170(VarCurr)&v5171(VarCurr)).
% 297.46/295.70  all VarCurr (v5171(VarCurr)<->v5172(VarCurr)|v5173(VarCurr)).
% 297.46/295.70  all VarCurr (-v5173(VarCurr)<->v5094(VarCurr,bitIndex1)).
% 297.46/295.70  all VarCurr (-v5172(VarCurr)<->v5094(VarCurr,bitIndex0)).
% 297.46/295.70  all VarCurr (v5170(VarCurr)<->v5094(VarCurr,bitIndex0)|v5094(VarCurr,bitIndex1)).
% 297.46/295.70  all VarCurr (v5164(VarCurr)<->v5094(VarCurr,bitIndex0)&v5165(VarCurr,bitIndex0)).
% 297.46/295.70  all VarCurr B (range_26_0(B)-> (v5165(VarCurr,B)<->v5078(VarCurr,B))).
% 297.46/295.70  all VarCurr (v5165(VarCurr,bitIndex27)<->$F).
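% Bits 26..0 of v5165 are just v5078, and bit 27 is pinned false, so v5165 is
% the 27-bit operand v5078 zero-extended to 28 bits. A sketch:
%
%   def v5165(v5078: list) -> list:
%       """v5078[0..26] with a constant-false bit 27 appended."""
%       return list(v5078[:27]) + [False]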
% 297.46/295.70  all VarCurr ((v5094(VarCurr,bitIndex5)<->v5049(VarCurr,bitIndex12))& (v5094(VarCurr,bitIndex4)<->v5049(VarCurr,bitIndex11))& (v5094(VarCurr,bitIndex3)<->v5049(VarCurr,bitIndex10))& (v5094(VarCurr,bitIndex2)<->v5049(VarCurr,bitIndex9))& (v5094(VarCurr,bitIndex1)<->v5049(VarCurr,bitIndex8))& (v5094(VarCurr,bitIndex0)<->v5049(VarCurr,bitIndex7))).
% 297.46/295.70  all VarCurr B (range_12_7(B)-> (v5049(VarCurr,B)<->v5074(VarCurr,B))).
% 297.46/295.70  all VarCurr B (range_12_7(B)-> (v5065(VarCurr,B)<->v5067(VarCurr,B))).
% 297.46/295.70  all VarCurr (v5067(VarCurr,bitIndex12)<->v5096(VarCurr)).
% 297.46/295.70  v5096(constB0)<->$F.
% 297.46/295.70  all VarCurr (v5067(VarCurr,bitIndex11)<->v5098(VarCurr)).
% 297.46/295.70  v5098(constB0)<->$F.
% 297.46/295.70  all VarCurr (v5067(VarCurr,bitIndex10)<->v5100(VarCurr)).
% 297.46/295.70  v5100(constB0)<->$F.
% 297.46/295.70  all VarCurr (v5067(VarCurr,bitIndex9)<->v5102(VarCurr)).
% 297.46/295.70  v5102(constB0)<->$F.
% 297.46/295.70  all VarCurr (v5067(VarCurr,bitIndex8)<->v5104(VarCurr)).
% 297.46/295.70  v5104(constB0)<->$F.
% 297.46/295.70  all VarCurr (v5067(VarCurr,bitIndex7)<->v5106(VarCurr)).
% 297.46/295.70  v5106(constB0)<->$F.
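% The six ground facts v5096(constB0)<->$F through v5106(constB0)<->$F pin
% the register bits behind v5067[12..7] to zero in the initial state constB0;
% their values in later states come from transition axioms elsewhere in the
% file. A sketch of the state-indexed reading (the dict encoding is an
% assumption):
%
%   INIT = {name: {"constB0": False}
%           for name in ("v5096", "v5098", "v5100", "v5102", "v5104", "v5106")}
%
%   def pinned(name: str, state: str):
%       """Value fixed by an axiom, or None where the state is unconstrained."""
%       return INIT.get(name, {}).get(state)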
% 297.46/295.70  all VarCurr B (range_12_7(B)-> (v5057(VarCurr,B)<->v5059(VarCurr,B))).
% 297.46/295.70  all VarCurr B (range_12_7(B)-> (v5059(VarCurr,B)<->v5061(VarCurr,B))).
% 297.46/295.70  all VarCurr B (range_12_7(B)-> (v5061(VarCurr,B)<->v5063(VarCurr,B))).
% 297.46/295.70  all VarCurr B (range_12_7(B)-> (v5063(VarCurr,B)<->v543(VarCurr,B))).
% 297.46/295.70  all B (range_12_7(B)<->bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B).
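% range_12_7 enumerates its member indices explicitly, like range_10_0 and
% range_84_38 earlier; read as a predicate on integer bit positions it is
% just an interval test:
%
%   def range_12_7(b: int) -> bool:
%       """range_12_7(B) <-> B in {bitIndex7, ..., bitIndex12}."""
%       return 7 <= b <= 12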
% 297.46/295.70  all VarCurr B (range_5_0(B)-> (v5078(VarCurr,B)<->v5079(VarCurr,B))).
% 297.46/295.70  all VarCurr (-v5080(VarCurr)& -v5081(VarCurr)& -v5083(VarCurr)& -v5084(VarCurr)& -v5086(VarCurr)& -v5087(VarCurr)& -v5090(VarCurr)-> (all B (range_26_0(B)-> (v5079(VarCurr,B)<->$F)))).
% 297.46/295.70  all VarCurr (v5090(VarCurr)-> (all B (range_26_0(B)-> (v5079(VarCurr,B)<->$F)))).
% 297.46/295.70  all VarCurr (v5087(VarCurr)-> (all B (range_26_0(B)-> (v5079(VarCurr,B)<->v5088(VarCurr,B))))).
% 297.46/295.70  all VarCurr (v5086(VarCurr)-> (all B (range_26_0(B)-> (v5079(VarCurr,B)<->$F)))).
% 297.46/295.70  all VarCurr (v5084(VarCurr)-> (all B (range_26_0(B)-> (v5079(VarCurr,B)<->v5085(VarCurr,B))))).
% 297.46/295.70  all VarCurr (v5083(VarCurr)-> (all B (range_26_0(B)-> (v5079(VarCurr,B)<->$F)))).
% 297.46/295.70  all VarCurr (v5081(VarCurr)-> (all B (range_26_0(B)-> (v5079(VarCurr,B)<->v5082(VarCurr,B))))).
% 297.46/295.70  all VarCurr (v5080(VarCurr)-> (v5079(VarCurr,bitIndex26)<->v5040(VarCurr,bitIndex37))& (v5079(VarCurr,bitIndex25)<->v5040(VarCurr,bitIndex36))& (v5079(VarCurr,bitIndex24)<->v5040(VarCurr,bitIndex35))& (v5079(VarCurr,bitIndex23)<->v5040(VarCurr,bitIndex34))& (v5079(VarCurr,bitIndex22)<->v5040(VarCurr,bitIndex33))& (v5079(VarCurr,bitIndex21)<->v5040(VarCurr,bitIndex32))& (v5079(VarCurr,bitIndex20)<->v5040(VarCurr,bitIndex31))& (v5079(VarCurr,bitIndex19)<->v5040(VarCurr,bitIndex30))& (v5079(VarCurr,bitIndex18)<->v5040(VarCurr,bitIndex29))& (v5079(VarCurr,bitIndex17)<->v5040(VarCurr,bitIndex28))& (v5079(VarCurr,bitIndex16)<->v5040(VarCurr,bitIndex27))& (v5079(VarCurr,bitIndex15)<->v5040(VarCurr,bitIndex26))& (v5079(VarCurr,bitIndex14)<->v5040(VarCurr,bitIndex25))& (v5079(VarCurr,bitIndex13)<->v5040(VarCurr,bitIndex24))& (v5079(VarCurr,bitIndex12)<->v5040(VarCurr,bitIndex23))& (v5079(VarCurr,bitIndex11)<->v5040(VarCurr,bitIndex22))& (v5079(VarCurr,bitIndex10)<->v5040(VarCurr,bitIndex21))& (v5079(VarCurr,bitIndex9)<->v5040(VarCurr,bitIndex20))& (v5079(VarCurr,bitIndex8)<->v5040(VarCurr,bitIndex19))& (v5079(VarCurr,bitIndex7)<->v5040(VarCurr,bitIndex18))& (v5079(VarCurr,bitIndex6)<->v5040(VarCurr,bitIndex17))& (v5079(VarCurr,bitIndex5)<->v5040(VarCurr,bitIndex16))& (v5079(VarCurr,bitIndex4)<->v5040(VarCurr,bitIndex15))& (v5079(VarCurr,bitIndex3)<->v5040(VarCurr,bitIndex14))& (v5079(VarCurr,bitIndex2)<->v5040(VarCurr,bitIndex13))& (v5079(VarCurr,bitIndex1)<->v5040(VarCurr,bitIndex12))& (v5079(VarCurr,bitIndex0)<->v5040(VarCurr,bitIndex11))).
% 297.46/295.71  all VarCurr (v5090(VarCurr)<->v5091(VarCurr)|v5092(VarCurr)).
% 297.46/295.71  all VarCurr (v5092(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$T)& (v5047(VarCurr,bitIndex1)<->$T)& (v5047(VarCurr,bitIndex0)<->$T)).
% 297.46/295.71  all VarCurr (v5091(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$T)& (v5047(VarCurr,bitIndex1)<->$T)& (v5047(VarCurr,bitIndex0)<->$F)).
% 297.46/295.71  all VarCurr ((v5088(VarCurr,bitIndex11)<->v5040(VarCurr,bitIndex37))& (v5088(VarCurr,bitIndex10)<->v5040(VarCurr,bitIndex36))& (v5088(VarCurr,bitIndex9)<->v5040(VarCurr,bitIndex35))& (v5088(VarCurr,bitIndex8)<->v5040(VarCurr,bitIndex34))& (v5088(VarCurr,bitIndex7)<->v5040(VarCurr,bitIndex33))& (v5088(VarCurr,bitIndex6)<->v5040(VarCurr,bitIndex32))& (v5088(VarCurr,bitIndex5)<->v5040(VarCurr,bitIndex31))& (v5088(VarCurr,bitIndex4)<->v5040(VarCurr,bitIndex30))& (v5088(VarCurr,bitIndex3)<->v5040(VarCurr,bitIndex29))& (v5088(VarCurr,bitIndex2)<->v5040(VarCurr,bitIndex28))& (v5088(VarCurr,bitIndex1)<->v5040(VarCurr,bitIndex27))& (v5088(VarCurr,bitIndex0)<->v5040(VarCurr,bitIndex26))).
% 297.46/295.71  all VarCurr ((v5088(VarCurr,bitIndex26)<->$F)& (v5088(VarCurr,bitIndex25)<->$F)& (v5088(VarCurr,bitIndex24)<->$F)& (v5088(VarCurr,bitIndex23)<->$F)& (v5088(VarCurr,bitIndex22)<->$F)& (v5088(VarCurr,bitIndex21)<->$F)& (v5088(VarCurr,bitIndex20)<->$F)& (v5088(VarCurr,bitIndex19)<->$F)& (v5088(VarCurr,bitIndex18)<->$F)& (v5088(VarCurr,bitIndex17)<->$F)& (v5088(VarCurr,bitIndex16)<->$F)& (v5088(VarCurr,bitIndex15)<->$F)& (v5088(VarCurr,bitIndex14)<->$F)& (v5088(VarCurr,bitIndex13)<->$F)& (v5088(VarCurr,bitIndex12)<->$F)).
% 297.46/295.71  -b000000000000000(bitIndex14).
% 297.46/295.71  -b000000000000000(bitIndex13).
% 297.46/295.71  -b000000000000000(bitIndex12).
% 297.46/295.71  -b000000000000000(bitIndex11).
% 297.46/295.71  -b000000000000000(bitIndex10).
% 297.46/295.71  -b000000000000000(bitIndex9).
% 297.46/295.71  -b000000000000000(bitIndex8).
% 297.46/295.71  -b000000000000000(bitIndex7).
% 297.46/295.71  -b000000000000000(bitIndex6).
% 297.46/295.71  -b000000000000000(bitIndex5).
% 297.46/295.71  -b000000000000000(bitIndex4).
% 297.46/295.71  -b000000000000000(bitIndex3).
% 297.46/295.71  -b000000000000000(bitIndex2).
% 297.46/295.71  -b000000000000000(bitIndex1).
% 297.46/295.71  -b000000000000000(bitIndex0).
% 297.46/295.71  all VarCurr (v5087(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$T)& (v5047(VarCurr,bitIndex1)<->$F)& (v5047(VarCurr,bitIndex0)<->$T)).
% 297.46/295.71  all VarCurr (v5086(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$T)& (v5047(VarCurr,bitIndex1)<->$F)& (v5047(VarCurr,bitIndex0)<->$F)).
% 297.46/295.71  all VarCurr ((v5085(VarCurr,bitIndex17)<->v5040(VarCurr,bitIndex37))& (v5085(VarCurr,bitIndex16)<->v5040(VarCurr,bitIndex36))& (v5085(VarCurr,bitIndex15)<->v5040(VarCurr,bitIndex35))& (v5085(VarCurr,bitIndex14)<->v5040(VarCurr,bitIndex34))& (v5085(VarCurr,bitIndex13)<->v5040(VarCurr,bitIndex33))& (v5085(VarCurr,bitIndex12)<->v5040(VarCurr,bitIndex32))& (v5085(VarCurr,bitIndex11)<->v5040(VarCurr,bitIndex31))& (v5085(VarCurr,bitIndex10)<->v5040(VarCurr,bitIndex30))& (v5085(VarCurr,bitIndex9)<->v5040(VarCurr,bitIndex29))& (v5085(VarCurr,bitIndex8)<->v5040(VarCurr,bitIndex28))& (v5085(VarCurr,bitIndex7)<->v5040(VarCurr,bitIndex27))& (v5085(VarCurr,bitIndex6)<->v5040(VarCurr,bitIndex26))& (v5085(VarCurr,bitIndex5)<->v5040(VarCurr,bitIndex25))& (v5085(VarCurr,bitIndex4)<->v5040(VarCurr,bitIndex24))& (v5085(VarCurr,bitIndex3)<->v5040(VarCurr,bitIndex23))& (v5085(VarCurr,bitIndex2)<->v5040(VarCurr,bitIndex22))& (v5085(VarCurr,bitIndex1)<->v5040(VarCurr,bitIndex21))& (v5085(VarCurr,bitIndex0)<->v5040(VarCurr,bitIndex20))).
% 297.46/295.71  all VarCurr ((v5085(VarCurr,bitIndex26)<->$F)& (v5085(VarCurr,bitIndex25)<->$F)& (v5085(VarCurr,bitIndex24)<->$F)& (v5085(VarCurr,bitIndex23)<->$F)& (v5085(VarCurr,bitIndex22)<->$F)& (v5085(VarCurr,bitIndex21)<->$F)& (v5085(VarCurr,bitIndex20)<->$F)& (v5085(VarCurr,bitIndex19)<->$F)& (v5085(VarCurr,bitIndex18)<->$F)).
% 297.46/295.71  all VarCurr (v5084(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$F)& (v5047(VarCurr,bitIndex1)<->$T)& (v5047(VarCurr,bitIndex0)<->$T)).
% 297.46/295.71  all VarCurr (v5083(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$F)& (v5047(VarCurr,bitIndex1)<->$T)& (v5047(VarCurr,bitIndex0)<->$F)).
% 297.46/295.71  all VarCurr ((v5082(VarCurr,bitIndex23)<->v5040(VarCurr,bitIndex37))& (v5082(VarCurr,bitIndex22)<->v5040(VarCurr,bitIndex36))& (v5082(VarCurr,bitIndex21)<->v5040(VarCurr,bitIndex35))& (v5082(VarCurr,bitIndex20)<->v5040(VarCurr,bitIndex34))& (v5082(VarCurr,bitIndex19)<->v5040(VarCurr,bitIndex33))& (v5082(VarCurr,bitIndex18)<->v5040(VarCurr,bitIndex32))& (v5082(VarCurr,bitIndex17)<->v5040(VarCurr,bitIndex31))& (v5082(VarCurr,bitIndex16)<->v5040(VarCurr,bitIndex30))& (v5082(VarCurr,bitIndex15)<->v5040(VarCurr,bitIndex29))& (v5082(VarCurr,bitIndex14)<->v5040(VarCurr,bitIndex28))& (v5082(VarCurr,bitIndex13)<->v5040(VarCurr,bitIndex27))& (v5082(VarCurr,bitIndex12)<->v5040(VarCurr,bitIndex26))& (v5082(VarCurr,bitIndex11)<->v5040(VarCurr,bitIndex25))& (v5082(VarCurr,bitIndex10)<->v5040(VarCurr,bitIndex24))& (v5082(VarCurr,bitIndex9)<->v5040(VarCurr,bitIndex23))& (v5082(VarCurr,bitIndex8)<->v5040(VarCurr,bitIndex22))& (v5082(VarCurr,bitIndex7)<->v5040(VarCurr,bitIndex21))& (v5082(VarCurr,bitIndex6)<->v5040(VarCurr,bitIndex20))& (v5082(VarCurr,bitIndex5)<->v5040(VarCurr,bitIndex19))& (v5082(VarCurr,bitIndex4)<->v5040(VarCurr,bitIndex18))& (v5082(VarCurr,bitIndex3)<->v5040(VarCurr,bitIndex17))& (v5082(VarCurr,bitIndex2)<->v5040(VarCurr,bitIndex16))& (v5082(VarCurr,bitIndex1)<->v5040(VarCurr,bitIndex15))& (v5082(VarCurr,bitIndex0)<->v5040(VarCurr,bitIndex14))).
% 297.46/295.71  all VarCurr ((v5082(VarCurr,bitIndex26)<->$F)& (v5082(VarCurr,bitIndex25)<->$F)& (v5082(VarCurr,bitIndex24)<->$F)).
% 297.46/295.71  all VarCurr (v5081(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$F)& (v5047(VarCurr,bitIndex1)<->$F)& (v5047(VarCurr,bitIndex0)<->$T)).
% 297.46/295.71  all VarCurr (v5080(VarCurr)<-> (v5047(VarCurr,bitIndex2)<->$F)& (v5047(VarCurr,bitIndex1)<->$F)& (v5047(VarCurr,bitIndex0)<->$F)).
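Taken together, the case axioms above describe an 8-way split on the 3-bit selector v5047: values 000, 001, 011 and 101 copy v5040 from bit 37 downward into v5079 with right shifts of 11, 14, 20 and 26 respectively, while the remaining values force an all-false result. A hedged Python sketch of that reading (the shift amounts are inferred from the per-bit correspondences; they are not stated explicitly in the log):

    SHIFTS = {0b000: 11, 0b001: 14, 0b011: 20, 0b101: 26}

    def v5079_value(v5040_bits: int, sel: int) -> int:
        # sel packs (bitIndex2, bitIndex1, bitIndex0) of v5047
        s = SHIFTS.get(sel)
        if s is None:
            return 0                    # sel in {2, 4, 6, 7}: all bits false
        width = 38 - s                  # bits 37..s of v5040 survive the shift
        return (v5040_bits >> s) & ((1 << width) - 1)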
% 297.46/295.71  all VarCurr ((v5047(VarCurr,bitIndex2)<->v5049(VarCurr,bitIndex6))& (v5047(VarCurr,bitIndex1)<->v5049(VarCurr,bitIndex5))& (v5047(VarCurr,bitIndex0)<->v5049(VarCurr,bitIndex4))).
% 297.46/295.71  all VarCurr B (range_6_4(B)-> (v5049(VarCurr,B)<->v5074(VarCurr,B))).
% 297.46/295.71  all VarCurr (-v5051(VarCurr)-> (all B (range_63_0(B)-> (v5074(VarCurr,B)<->v5065(VarCurr,B))))).
% 297.46/295.71  all VarCurr (v5051(VarCurr)-> (all B (range_63_0(B)-> (v5074(VarCurr,B)<->v5057(VarCurr,B))))).
% 297.46/295.71  all VarCurr B (range_6_4(B)-> (v5065(VarCurr,B)<->v5067(VarCurr,B))).
% 297.46/295.71  all VarCurr (v5067(VarCurr,bitIndex6)<->v5069(VarCurr)).
% 297.46/295.71  v5069(constB0)<->$F.
% 297.46/295.71  all VarCurr (v5067(VarCurr,bitIndex5)<->v5071(VarCurr)).
% 297.46/295.71  v5071(constB0)<->$F.
% 297.46/295.71  all VarCurr (v5067(VarCurr,bitIndex4)<->v5073(VarCurr)).
% 297.46/295.71  v5073(constB0)<->$F.
% 297.46/295.71  all VarCurr B (range_6_4(B)-> (v5057(VarCurr,B)<->v5059(VarCurr,B))).
% 297.46/295.71  all VarCurr B (range_6_4(B)-> (v5059(VarCurr,B)<->v5061(VarCurr,B))).
% 297.46/295.71  all VarCurr B (range_6_4(B)-> (v5061(VarCurr,B)<->v5063(VarCurr,B))).
% 297.46/295.71  all VarCurr B (range_6_4(B)-> (v5063(VarCurr,B)<->v543(VarCurr,B))).
% 297.46/295.71  all B (range_6_4(B)<->bitIndex4=B|bitIndex5=B|bitIndex6=B).
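The block ending here repeats the earlier slice-aliasing pattern on bits 4..6, and the two guarded equations for v5074 amount to a plain 2:1 bus mux: v5057 is selected when v5051 holds, v5065 otherwise. Collapsed to a word operation (illustrative only):

    def v5074_word(v5051: bool, v5057_bits: int, v5065_bits: int) -> int:
        # per-bit mux over bits 63..0, folded into one conditional
        return v5057_bits if v5051 else v5065_bits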
% 297.46/295.71  all VarCurr (v5051(VarCurr)<->v5053(VarCurr)).
% 297.46/295.71  all VarCurr (v5053(VarCurr)<->v5055(VarCurr)).
% 297.46/295.71  all VarCurr (v5038(VarCurr)<-> (v5040(VarCurr,bitIndex61)<->$T)& (v5040(VarCurr,bitIndex60)<->$T)& (v5040(VarCurr,bitIndex59)<->$T)& (v5040(VarCurr,bitIndex58)<->$T)& (v5040(VarCurr,bitIndex57)<->$T)& (v5040(VarCurr,bitIndex56)<->$T)& (v5040(VarCurr,bitIndex55)<->$T)& (v5040(VarCurr,bitIndex54)<->$T)& (v5040(VarCurr,bitIndex53)<->$T)& (v5040(VarCurr,bitIndex52)<->$T)& (v5040(VarCurr,bitIndex51)<->$T)& (v5040(VarCurr,bitIndex50)<->$T)& (v5040(VarCurr,bitIndex49)<->$T)& (v5040(VarCurr,bitIndex48)<->$T)& (v5040(VarCurr,bitIndex47)<->$F)& (v5040(VarCurr,bitIndex46)<->$F)& (v5040(VarCurr,bitIndex45)<->$F)& (v5040(VarCurr,bitIndex44)<->$F)& (v5040(VarCurr,bitIndex43)<->$F)& (v5040(VarCurr,bitIndex42)<->$F)& (v5040(VarCurr,bitIndex41)<->$F)& (v5040(VarCurr,bitIndex40)<->$F)& (v5040(VarCurr,bitIndex39)<->$F)& (v5040(VarCurr,bitIndex38)<->$F)& (v5040(VarCurr,bitIndex37)<->$F)).
% 297.46/295.71  b1111111111111100000000000(bitIndex24).
% 297.46/295.71  b1111111111111100000000000(bitIndex23).
% 297.46/295.71  b1111111111111100000000000(bitIndex22).
% 297.46/295.71  b1111111111111100000000000(bitIndex21).
% 297.46/295.71  b1111111111111100000000000(bitIndex20).
% 297.46/295.71  b1111111111111100000000000(bitIndex19).
% 297.46/295.71  b1111111111111100000000000(bitIndex18).
% 297.52/295.72  b1111111111111100000000000(bitIndex17).
% 297.52/295.72  b1111111111111100000000000(bitIndex16).
% 297.52/295.72  b1111111111111100000000000(bitIndex15).
% 297.52/295.72  b1111111111111100000000000(bitIndex14).
% 297.52/295.72  b1111111111111100000000000(bitIndex13).
% 297.52/295.72  b1111111111111100000000000(bitIndex12).
% 297.52/295.72  b1111111111111100000000000(bitIndex11).
% 297.52/295.72  -b1111111111111100000000000(bitIndex10).
% 297.52/295.72  -b1111111111111100000000000(bitIndex9).
% 297.52/295.72  -b1111111111111100000000000(bitIndex8).
% 297.52/295.72  -b1111111111111100000000000(bitIndex7).
% 297.52/295.72  -b1111111111111100000000000(bitIndex6).
% 297.52/295.72  -b1111111111111100000000000(bitIndex5).
% 297.52/295.72  -b1111111111111100000000000(bitIndex4).
% 297.52/295.72  -b1111111111111100000000000(bitIndex3).
% 297.52/295.72  -b1111111111111100000000000(bitIndex2).
% 297.52/295.72  -b1111111111111100000000000(bitIndex1).
% 297.52/295.72  -b1111111111111100000000000(bitIndex0).
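The per-bit facts above pin down the 25-bit constant b1111111111111100000000000 (bits 24..11 true, bits 10..0 false), which is the pattern the v5038 axiom before them compares against bits 61..37 of v5040. A compact sketch of that comparison, with the slice width inferred from the bit indices:

    PATTERN = 0b1111111111111100000000000   # 14 ones, then 11 zeros

    def v5038_value(v5040_bits: int) -> bool:
        # compare the 25-bit slice v5040[61:37] against the fixed pattern
        return ((v5040_bits >> 37) & ((1 << 25) - 1)) == PATTERN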
% 297.52/295.72  all B (range_84_0(B)-> (v5040(constB0,B)<->$F)).
% 297.52/295.72  all B (range_84_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B|bitIndex56=B|bitIndex57=B|bitIndex58=B|bitIndex59=B|bitIndex60=B|bitIndex61=B|bitIndex62=B|bitIndex63=B|bitIndex64=B|bitIndex65=B|bitIndex66=B|bitIndex67=B|bitIndex68=B|bitIndex69=B|bitIndex70=B|bitIndex71=B|bitIndex72=B|bitIndex73=B|bitIndex74=B|bitIndex75=B|bitIndex76=B|bitIndex77=B|bitIndex78=B|bitIndex79=B|bitIndex80=B|bitIndex81=B|bitIndex82=B|bitIndex83=B|bitIndex84=B).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex84).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex83).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex82).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex81).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex80).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex79).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex78).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex77).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex76).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex75).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex74).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex73).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex72).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex71).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex70).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex69).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex68).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex67).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex66).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex65).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex64).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex63).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex62).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex61).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex60).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex59).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex58).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex57).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex56).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex55).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex54).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex53).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex52).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex51).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex50).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex49).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex48).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex47).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex46).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex45).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex44).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex43).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex42).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex41).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex40).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex39).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex38).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex37).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex36).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex35).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex34).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex33).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex32).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex31).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex30).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex29).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex28).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex27).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex26).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex25).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex24).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex23).
% 297.52/295.72  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex22).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex21).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex20).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex19).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex18).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex17).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex16).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex15).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex14).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex13).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex12).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex11).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex10).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex9).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex8).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex7).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex6).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex5).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex4).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex3).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex2).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex1).
% 297.52/295.73  -b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex0).
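The long run of negated facts above is the per-bit axiomatization of the 85-bit all-zero constant used to initialize v5040 at constB0 (see the range_84_0 axioms just before it). Blocks like this are entirely mechanical; a sketch of how one could be emitted, following the naming scheme visible in the log:

    # Emit per-bit negative facts for an all-zero 85-bit constant.
    name = "b" + "0" * 85
    for i in reversed(range(85)):
        print(f"-{name}(bitIndex{i}).")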
% 297.52/295.73  all VarCurr (v5036(VarCurr)<->v4749(VarCurr)).
% 297.52/295.73  all VarCurr (v5020(VarCurr)<->v5022(VarCurr)).
% 297.52/295.73  all VarCurr (v5022(VarCurr)<->v5024(VarCurr)).
% 297.52/295.73  all VarCurr (v5024(VarCurr)<->v5026(VarCurr)).
% 297.52/295.73  all VarCurr (v5026(VarCurr)<->v815(VarCurr,bitIndex1)).
% 297.52/295.73  all VarCurr (v5017(VarCurr)<->v228(VarCurr)).
% 297.52/295.73  all VarCurr (v4897(VarCurr)<->v4899(VarCurr)).
% 297.52/295.73  all VarCurr (v4899(VarCurr)<->v4901(VarCurr)).
% 297.52/295.73  all VarCurr (v4901(VarCurr)<->v4903(VarCurr)).
% 297.52/295.73  all VarCurr (v4903(VarCurr)<->v4905(VarCurr)).
% 297.52/295.73  all VarCurr (v4905(VarCurr)<->v4907(VarCurr)).
% 297.52/295.73  all VarCurr (v4907(VarCurr)<->v4909(VarCurr)).
% 297.52/295.73  all VarCurr (v4909(VarCurr)<->v4911(VarCurr)).
% 297.52/295.73  all VarCurr (v4911(VarCurr)<->v4913(VarCurr)).
% 297.52/295.73  all VarCurr (v4913(VarCurr)<->v4915(VarCurr,bitIndex8)).
% 297.52/295.73  all VarCurr (v4915(VarCurr,bitIndex8)<->v4917(VarCurr,bitIndex8)).
% 297.52/295.73  all VarCurr (v4917(VarCurr,bitIndex8)<->v4919(VarCurr)).
% 297.52/295.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4970(VarNext)-> (v4919(VarNext)<->v4919(VarCurr)))).
% 297.52/295.73  all VarNext (v4970(VarNext)-> (v4919(VarNext)<->v5005(VarNext))).
% 297.52/295.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v5005(VarNext)<->v5003(VarCurr))).
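These three axioms are the guarded-register template that recurs throughout this file: across a nextState step, v4919 holds its value unless the write enable v4970 fires, in which case it takes the staged value v5005, itself a one-step delay of v5003. A minimal sketch of one transition (illustrative naming, not prover output):

    def v4919_step(v4919_curr: bool, v4970_next: bool, v5003_curr: bool) -> bool:
        # hold when the enable is off; otherwise load the staged value
        return v5003_curr if v4970_next else v4919_curr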
% 297.52/295.73  all VarCurr (-v4921(VarCurr)-> (v5003(VarCurr)<->v5006(VarCurr))).
% 297.52/295.73  all VarCurr (v4921(VarCurr)-> (v5003(VarCurr)<->v4930(VarCurr))).
% 297.52/295.73  all VarCurr (-v4983(VarCurr)-> (v5006(VarCurr)<->v4961(VarCurr))).
% 297.52/295.73  all VarCurr (v4983(VarCurr)-> (v5006(VarCurr)<->v5007(VarCurr))).
% 297.52/295.73  all VarCurr (-v4986(VarCurr)& -v4988(VarCurr)-> (v5007(VarCurr)<->v5011(VarCurr))).
% 297.52/295.73  all VarCurr (v4988(VarCurr)-> (v5007(VarCurr)<->v5010(VarCurr))).
% 297.52/295.73  all VarCurr (v4986(VarCurr)-> (v5007(VarCurr)<->v5008(VarCurr))).
% 297.52/295.73  all VarCurr (-v4996(VarCurr)-> (v5011(VarCurr)<->v4961(VarCurr))).
% 297.52/295.73  all VarCurr (v4996(VarCurr)-> (v5011(VarCurr)<->$T)).
% 297.52/295.73  all VarCurr (-v4990(VarCurr)-> (v5010(VarCurr)<->v4961(VarCurr))).
% 297.52/295.73  all VarCurr (v4990(VarCurr)-> (v5010(VarCurr)<->$F)).
% 297.52/295.74  all VarCurr (-v5009(VarCurr)-> (v5008(VarCurr)<->$F)).
% 297.52/295.74  all VarCurr (v5009(VarCurr)-> (v5008(VarCurr)<->$T)).
% 297.52/295.74  all VarCurr (v5009(VarCurr)<-> (v4938(VarCurr)<->$T)).
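The staged value v5003 itself is fixed by the chain of guarded equations above. Reading them as a nested if/else, and assuming (as the decode structure suggests) that v4986 and v4988 never hold simultaneously, the selection is roughly:

    def v5003_value(v4921, v4983, v4986, v4988, v4990, v4996, v4938,
                    v4930, v4961):
        if v4921:
            return v4930
        if not v4983:
            return v4961
        if v4986:
            return v4938                       # v5008 <-> v5009 <-> (v4938 = T)
        if v4988:
            return False if v4990 else v4961   # v5010
        return True if v4996 else v4961        # v5011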
% 297.52/295.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4970(VarNext)<->v4971(VarNext)&v4980(VarNext))).
% 297.52/295.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4980(VarNext)<->v4978(VarCurr))).
% 297.52/295.74  all VarCurr (v4978(VarCurr)<->v4921(VarCurr)|v4981(VarCurr)).
% 297.52/295.74  all VarCurr (v4981(VarCurr)<->v4982(VarCurr)&v5002(VarCurr)).
% 297.52/295.74  all VarCurr (-v5002(VarCurr)<->v4921(VarCurr)).
% 297.52/295.74  all VarCurr (v4982(VarCurr)<->v4983(VarCurr)|v5000(VarCurr)).
% 297.52/295.74  all VarCurr (v5000(VarCurr)<->v4944(VarCurr)&v5001(VarCurr)).
% 297.52/295.74  all VarCurr (-v5001(VarCurr)<->v4946(VarCurr)).
% 297.52/295.74  all VarCurr (v4983(VarCurr)<->v4984(VarCurr)&v4946(VarCurr)).
% 297.52/295.74  all VarCurr (v4984(VarCurr)<->v4985(VarCurr)|v4994(VarCurr)).
% 297.52/295.74  all VarCurr (v4994(VarCurr)<->v4995(VarCurr)&v4999(VarCurr)).
% 297.52/295.74  all VarCurr (v4999(VarCurr)<-> (v4987(VarCurr,bitIndex2)<->$F)& (v4987(VarCurr,bitIndex1)<->$F)& (v4987(VarCurr,bitIndex0)<->$T)).
% 297.52/295.74  all VarCurr (v4995(VarCurr)<->v4996(VarCurr)|v4997(VarCurr)).
% 297.52/295.74  all VarCurr (v4997(VarCurr)<->v4944(VarCurr)&v4998(VarCurr)).
% 297.52/295.74  all VarCurr (-v4998(VarCurr)<->v4996(VarCurr)).
% 297.52/295.74  all VarCurr (v4996(VarCurr)<-> (v4938(VarCurr)<->$T)).
% 297.52/295.74  all VarCurr (v4985(VarCurr)<->v4986(VarCurr)|v4988(VarCurr)).
% 297.52/295.74  all VarCurr (v4988(VarCurr)<->v4989(VarCurr)&v4993(VarCurr)).
% 297.52/295.74  all VarCurr (v4993(VarCurr)<-> (v4987(VarCurr,bitIndex2)<->$F)& (v4987(VarCurr,bitIndex1)<->$T)& (v4987(VarCurr,bitIndex0)<->$F)).
% 297.52/295.74  all VarCurr (v4989(VarCurr)<->v4990(VarCurr)|v4991(VarCurr)).
% 297.52/295.74  all VarCurr (v4991(VarCurr)<->v4944(VarCurr)&v4992(VarCurr)).
% 297.52/295.74  all VarCurr (-v4992(VarCurr)<->v4990(VarCurr)).
% 297.52/295.74  all VarCurr (v4990(VarCurr)<-> (v4938(VarCurr)<->$T)).
% 297.52/295.74  all VarCurr (v4986(VarCurr)<-> (v4987(VarCurr,bitIndex2)<->$T)& (v4987(VarCurr,bitIndex1)<->$F)& (v4987(VarCurr,bitIndex0)<->$F)).
% 297.52/295.74  all VarCurr (v4987(VarCurr,bitIndex0)<->v4936(VarCurr)).
% 297.52/295.74  all VarCurr (v4987(VarCurr,bitIndex1)<->v4934(VarCurr)).
% 297.52/295.74  all VarCurr (v4987(VarCurr,bitIndex2)<->v4932(VarCurr)).
% 297.52/295.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4971(VarNext)<->v4972(VarNext)&v4963(VarNext))).
% 297.52/295.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4972(VarNext)<->v4974(VarNext))).
% 297.52/295.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4974(VarNext)<->v4963(VarCurr))).
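These three axioms form a rising-edge detector: v4974 is a one-step delay of v4963, v4972 is its negation, and v4971 therefore fires exactly when v4963 was false in the previous state and is true in the current one. One step, sketched:

    def v4971_step(v4963_prev: bool, v4963_now: bool) -> bool:
        v4974 = v4963_prev           # delayed copy of the sampled signal
        v4972 = not v4974
        return v4972 and v4963_now   # rising edge: was low, now high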
% 297.52/295.74  v4919(constB0)<->$F.
% 297.52/295.74  all VarCurr (v4963(VarCurr)<->v4965(VarCurr)).
% 297.52/295.74  all VarCurr (v4965(VarCurr)<->v4967(VarCurr)).
% 297.52/295.74  all VarCurr (v4967(VarCurr)<->v4847(VarCurr)).
% 297.52/295.74  all VarCurr (v4961(VarCurr)<->$F).
% 297.52/295.74  all VarCurr (v4946(VarCurr)<->v4948(VarCurr)).
% 297.52/295.74  all VarCurr (v4948(VarCurr)<->v4950(VarCurr)).
% 297.52/295.74  all VarCurr (v4950(VarCurr)<->v4952(VarCurr)).
% 297.52/295.74  all VarCurr (v4952(VarCurr)<->v4954(VarCurr)&v4836(VarCurr)).
% 297.52/295.74  all VarCurr (v4954(VarCurr)<->v4956(VarCurr)).
% 297.52/295.74  all VarCurr (v4956(VarCurr)<->v4958(VarCurr)).
% 297.52/295.74  v4958(constB0)<->$F.
% 297.52/295.74  all VarCurr (v4944(VarCurr)<->$F).
% 297.52/295.74  all VarCurr (v4938(VarCurr)<->v4940(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4940(VarCurr,bitIndex8)<->v4942(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4942(VarCurr,bitIndex8)<->v4800(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4800(VarCurr,bitIndex8)<->v4802(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4802(VarCurr,bitIndex8)<->v4804(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4804(VarCurr,bitIndex8)<->v4806(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4806(VarCurr,bitIndex8)<->v4808(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4808(VarCurr,bitIndex8)<->v4810(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4810(VarCurr,bitIndex8)<->v4812(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4812(VarCurr,bitIndex8)<->v4814(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4814(VarCurr,bitIndex8)<->v4816(VarCurr,bitIndex8)).
% 297.52/295.74  all VarCurr (v4816(VarCurr,bitIndex8)<->v4818(VarCurr,bitIndex8)).
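The ladder above is pure aliasing: bit 8 of each signal is defined to equal bit 8 of the next one down, so v4938 ultimately reads bit 8 of v4818 (whose bits over range_63_0 are initialized false at constB0 a little further on). Collapsed, the whole chain is just:

    def v4938_value(v4818_bits: int) -> bool:
        # v4938 <-> v4940[8] <-> v4942[8] <-> v4800[8] <-> ... <-> v4818[8]
        return bool((v4818_bits >> 8) & 1)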
% 297.52/295.74  all VarCurr (v4936(VarCurr)<->$F).
% 297.52/295.74  all VarCurr (v4934(VarCurr)<->$F).
% 297.52/295.74  all VarCurr (v4932(VarCurr)<->$T).
% 297.52/295.74  all VarCurr (v4930(VarCurr)<->$F).
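A constant-folding observation (an inference from the constants above, not something the prover asserts): with v4932 = T, v4934 = F and v4936 = F, the selector v4987 is pinned at binary 100 in every state, so of the three selector decodes only v4986 can ever hold, and the v4988 and v4994 branches of the enable logic are dead.

    v4987 = (True, False, False)             # (bit2, bit1, bit0), from the constants
    v4986 = v4987 == (True, False, False)    # sel == 100: always True
    v4993 = v4987 == (False, True, False)    # sel == 010: always False
    v4999 = v4987 == (False, False, True)    # sel == 001: always False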
% 297.52/295.74  all VarCurr (v4921(VarCurr)<->v4923(VarCurr)).
% 297.52/295.74  all VarCurr (-v4923(VarCurr)<->v4925(VarCurr)).
% 297.52/295.74  all VarCurr (v4925(VarCurr)<->v4927(VarCurr)).
% 297.52/295.74  all VarCurr (v4927(VarCurr)<->v4769(VarCurr)).
% 297.52/295.74  all VarCurr (v4747(VarCurr)<->v4749(VarCurr)).
% 297.52/295.74  all VarCurr (v4749(VarCurr)<->v4751(VarCurr)).
% 297.52/295.75  all VarCurr (v4751(VarCurr)<->v4753(VarCurr)).
% 297.52/295.75  all VarCurr (v4753(VarCurr)<->v4755(VarCurr)).
% 297.52/295.75  all VarCurr (v4755(VarCurr)<->v4757(VarCurr)).
% 297.52/295.75  all VarCurr (v4757(VarCurr)<->v260(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v260(VarCurr,bitIndex2)<->v262(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v262(VarCurr,bitIndex2)<->v4759(VarCurr)).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4852(VarNext)-> (v4759(VarNext)<->v4759(VarCurr)))).
% 297.52/295.75  all VarNext (v4852(VarNext)-> (v4759(VarNext)<->v4887(VarNext))).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4887(VarNext)<->v4885(VarCurr))).
% 297.52/295.75  all VarCurr (-v4761(VarCurr)-> (v4885(VarCurr)<->v4888(VarCurr))).
% 297.52/295.75  all VarCurr (v4761(VarCurr)-> (v4885(VarCurr)<->v4786(VarCurr))).
% 297.52/295.75  all VarCurr (-v4865(VarCurr)-> (v4888(VarCurr)<->v4839(VarCurr))).
% 297.52/295.75  all VarCurr (v4865(VarCurr)-> (v4888(VarCurr)<->v4889(VarCurr))).
% 297.52/295.75  all VarCurr (-v4868(VarCurr)& -v4870(VarCurr)-> (v4889(VarCurr)<->v4893(VarCurr))).
% 297.52/295.75  all VarCurr (v4870(VarCurr)-> (v4889(VarCurr)<->v4892(VarCurr))).
% 297.52/295.75  all VarCurr (v4868(VarCurr)-> (v4889(VarCurr)<->v4890(VarCurr))).
% 297.52/295.75  all VarCurr (-v4878(VarCurr)-> (v4893(VarCurr)<->v4839(VarCurr))).
% 297.52/295.75  all VarCurr (v4878(VarCurr)-> (v4893(VarCurr)<->$T)).
% 297.52/295.75  all VarCurr (-v4872(VarCurr)-> (v4892(VarCurr)<->v4839(VarCurr))).
% 297.52/295.75  all VarCurr (v4872(VarCurr)-> (v4892(VarCurr)<->$F)).
% 297.52/295.75  all VarCurr (-v4891(VarCurr)-> (v4890(VarCurr)<->$F)).
% 297.52/295.75  all VarCurr (v4891(VarCurr)-> (v4890(VarCurr)<->$T)).
% 297.52/295.75  all VarCurr (v4891(VarCurr)<-> (v4794(VarCurr)<->$T)).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4852(VarNext)<->v4853(VarNext)&v4862(VarNext))).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4862(VarNext)<->v4860(VarCurr))).
% 297.52/295.75  all VarCurr (v4860(VarCurr)<->v4761(VarCurr)|v4863(VarCurr)).
% 297.52/295.75  all VarCurr (v4863(VarCurr)<->v4864(VarCurr)&v4884(VarCurr)).
% 297.52/295.75  all VarCurr (-v4884(VarCurr)<->v4761(VarCurr)).
% 297.52/295.75  all VarCurr (v4864(VarCurr)<->v4865(VarCurr)|v4882(VarCurr)).
% 297.52/295.75  all VarCurr (v4882(VarCurr)<->v4820(VarCurr)&v4883(VarCurr)).
% 297.52/295.75  all VarCurr (-v4883(VarCurr)<->v4822(VarCurr)).
% 297.52/295.75  all VarCurr (v4865(VarCurr)<->v4866(VarCurr)&v4822(VarCurr)).
% 297.52/295.75  all VarCurr (v4866(VarCurr)<->v4867(VarCurr)|v4876(VarCurr)).
% 297.52/295.75  all VarCurr (v4876(VarCurr)<->v4877(VarCurr)&v4881(VarCurr)).
% 297.52/295.75  all VarCurr (v4881(VarCurr)<-> (v4869(VarCurr,bitIndex2)<->$F)& (v4869(VarCurr,bitIndex1)<->$F)& (v4869(VarCurr,bitIndex0)<->$T)).
% 297.52/295.75  all VarCurr (v4877(VarCurr)<->v4878(VarCurr)|v4879(VarCurr)).
% 297.52/295.75  all VarCurr (v4879(VarCurr)<->v4820(VarCurr)&v4880(VarCurr)).
% 297.52/295.75  all VarCurr (-v4880(VarCurr)<->v4878(VarCurr)).
% 297.52/295.75  all VarCurr (v4878(VarCurr)<-> (v4794(VarCurr)<->$T)).
% 297.52/295.75  all VarCurr (v4867(VarCurr)<->v4868(VarCurr)|v4870(VarCurr)).
% 297.52/295.75  all VarCurr (v4870(VarCurr)<->v4871(VarCurr)&v4875(VarCurr)).
% 297.52/295.75  all VarCurr (v4875(VarCurr)<-> (v4869(VarCurr,bitIndex2)<->$F)& (v4869(VarCurr,bitIndex1)<->$T)& (v4869(VarCurr,bitIndex0)<->$F)).
% 297.52/295.75  all VarCurr (v4871(VarCurr)<->v4872(VarCurr)|v4873(VarCurr)).
% 297.52/295.75  all VarCurr (v4873(VarCurr)<->v4820(VarCurr)&v4874(VarCurr)).
% 297.52/295.75  all VarCurr (-v4874(VarCurr)<->v4872(VarCurr)).
% 297.52/295.75  all VarCurr (v4872(VarCurr)<-> (v4794(VarCurr)<->$T)).
% 297.52/295.75  all VarCurr (v4868(VarCurr)<-> (v4869(VarCurr,bitIndex2)<->$T)& (v4869(VarCurr,bitIndex1)<->$F)& (v4869(VarCurr,bitIndex0)<->$F)).
% 297.52/295.75  all VarCurr (v4869(VarCurr,bitIndex0)<->v4792(VarCurr)).
% 297.52/295.75  all VarCurr (v4869(VarCurr,bitIndex1)<->v4790(VarCurr)).
% 297.52/295.75  all VarCurr (v4869(VarCurr,bitIndex2)<->v4788(VarCurr)).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4853(VarNext)<->v4854(VarNext)&v4841(VarNext))).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4854(VarNext)<->v4856(VarNext))).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4856(VarNext)<->v4841(VarCurr))).
% 297.52/295.75  v4759(constB0)<->$F.
% 297.52/295.75  all VarCurr (v4841(VarCurr)<->v4843(VarCurr)).
% 297.52/295.75  all VarCurr (v4843(VarCurr)<->v4845(VarCurr)).
% 297.52/295.75  all VarCurr (v4845(VarCurr)<->v4847(VarCurr)).
% 297.52/295.75  all VarCurr (v4847(VarCurr)<->v4849(VarCurr)).
% 297.52/295.75  all VarCurr (v4849(VarCurr)<->v714(VarCurr)).
% 297.52/295.75  all VarCurr (v4839(VarCurr)<->$F).
% 297.52/295.75  all VarCurr (v4822(VarCurr)<->v4824(VarCurr)).
% 297.52/295.75  all VarCurr (v4824(VarCurr)<->v4826(VarCurr)).
% 297.52/295.75  all VarCurr (v4826(VarCurr)<->v4828(VarCurr)).
% 297.52/295.75  all VarCurr (v4828(VarCurr)<->v4830(VarCurr)&v4836(VarCurr)).
% 297.52/295.75  all VarCurr (v4836(VarCurr)<->v680(VarCurr)).
% 297.52/295.75  all VarCurr (v4830(VarCurr)<->v4832(VarCurr)).
% 297.52/295.75  all VarCurr (v4832(VarCurr)<->v4834(VarCurr)).
% 297.52/295.75  v4834(constB0)<->$F.
% 297.52/295.75  all VarCurr (v4820(VarCurr)<->$F).
% 297.52/295.75  all VarCurr (v4794(VarCurr)<->v4796(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4796(VarCurr,bitIndex2)<->v4798(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4798(VarCurr,bitIndex2)<->v4800(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4800(VarCurr,bitIndex2)<->v4802(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4802(VarCurr,bitIndex2)<->v4804(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4804(VarCurr,bitIndex2)<->v4806(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4806(VarCurr,bitIndex2)<->v4808(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4808(VarCurr,bitIndex2)<->v4810(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4810(VarCurr,bitIndex2)<->v4812(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4812(VarCurr,bitIndex2)<->v4814(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4814(VarCurr,bitIndex2)<->v4816(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4816(VarCurr,bitIndex2)<->v4818(VarCurr,bitIndex2)).
% 297.52/295.75  all B (range_63_0(B)-> (v4818(constB0,B)<->$F)).
% 297.52/295.75  all VarCurr (v4792(VarCurr)<->$F).
% 297.52/295.75  all VarCurr (v4790(VarCurr)<->$F).
% 297.52/295.75  all VarCurr (v4788(VarCurr)<->$T).
% 297.52/295.75  all VarCurr (v4786(VarCurr)<->$F).
% 297.52/295.75  all VarCurr (v4761(VarCurr)<->v4763(VarCurr)).
% 297.52/295.75  all VarCurr (-v4763(VarCurr)<->v4765(VarCurr)).
% 297.52/295.75  all VarCurr (v4765(VarCurr)<->v4767(VarCurr)).
% 297.52/295.75  all VarCurr (v4767(VarCurr)<->v4769(VarCurr)).
% 297.52/295.75  all VarCurr (v4769(VarCurr)<->v4771(VarCurr)).
% 297.52/295.75  all VarCurr (v4771(VarCurr)<->v4773(VarCurr)).
% 297.52/295.75  all VarCurr (v4773(VarCurr)<->v4775(VarCurr)).
% 297.52/295.75  all VarCurr (v4775(VarCurr)<->v4777(VarCurr)).
% 297.52/295.75  all VarCurr (v4777(VarCurr)<->v4779(VarCurr)).
% 297.52/295.75  all VarCurr (v4779(VarCurr)<->v4781(VarCurr)).
% 297.52/295.75  all VarCurr (v4781(VarCurr)<->v4783(VarCurr)).
% 297.52/295.75  all VarCurr (v4783(VarCurr)<->v670(VarCurr)).
% 297.52/295.75  all VarCurr (v775(VarCurr,bitIndex2)<->v777(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v777(VarCurr,bitIndex2)<->v779(VarCurr,bitIndex2)).
% 297.52/295.75  all VarCurr (v4738(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.52/295.75  all VarCurr (v4736(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.52/295.75  all VarCurr (v4687(VarCurr)<->v4689(VarCurr)).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4713(VarNext)-> (v4689(VarNext)<->v4689(VarCurr)))).
% 297.52/295.75  all VarNext (v4713(VarNext)-> (v4689(VarNext)<->v4725(VarNext))).
% 297.52/295.75  all VarCurr (-v4714(VarCurr)-> (v4725(VarCurr)<->v4726(VarCurr))).
% 297.52/295.75  all VarCurr (v4714(VarCurr)-> (v4725(VarCurr)<->v4695(VarCurr))).
% 297.52/295.75  all VarCurr (-v4719(VarCurr)-> (v4726(VarCurr)<->v4697(VarCurr))).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4719(VarCurr)-> (v4726(VarCurr)<->x166(VarNext)))).
% 297.52/295.75  all VarCurr (v4713(VarCurr)<->v4714(VarCurr)|v4717(VarCurr)).
% 297.52/295.75  all VarCurr (v4717(VarCurr)<->v4718(VarCurr)&v4724(VarCurr)).
% 297.52/295.75  all VarCurr (-v4724(VarCurr)<->v4714(VarCurr)).
% 297.52/295.75  all VarCurr (v4718(VarCurr)<->v4719(VarCurr)|v4721(VarCurr)).
% 297.52/295.75  all VarCurr (v4721(VarCurr)<->v4722(VarCurr)&v4723(VarCurr)).
% 297.52/295.75  all VarCurr (-v4723(VarCurr)<->v4719(VarCurr)).
% 297.52/295.75  all VarCurr (v4722(VarCurr)<->v4691(VarCurr)&v4693(VarCurr)).
% 297.52/295.75  all VarCurr (v4719(VarCurr)<->v4691(VarCurr)&v4720(VarCurr)).
% 297.52/295.75  all VarCurr (-v4720(VarCurr)<->v4693(VarCurr)).
% 297.52/295.75  all VarCurr (v4714(VarCurr)<->v4715(VarCurr)&v4716(VarCurr)).
% 297.52/295.75  all VarCurr (-v4716(VarCurr)<->v4693(VarCurr)).
% 297.52/295.75  all VarCurr (-v4715(VarCurr)<->v4691(VarCurr)).
% 297.52/295.75  v4689(constB0)<->$F.
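The v4689 block ending here instantiates another recurring template: the pair (v4691, v4693) is decoded into v4714 (both false), v4719 (v4691 only) and v4722 (both true), and the staged value selects v4695, the free input x166, or v4697 accordingly; the remaining combination leaves the register unchanged because the enable v4713 stays off. A hedged collapse of one step (the same shape recurs below for v4644, v4599, v4554, v4507 and v763):

    def v4689_step(v4691, v4693, v4689_curr, v4695, v4697, x166_next):
        if not v4691 and not v4693:   # v4714: load v4695
            return v4695
        if v4691 and not v4693:       # v4719: sample the free input x166
            return x166_next
        if v4691 and v4693:           # v4722: load v4697
            return v4697
        return v4689_curr             # (F, T): enable v4713 off, hold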
% 297.52/295.75  all VarCurr (v4697(VarCurr)<->v4517(VarCurr,bitIndex4)).
% 297.52/295.75  all VarCurr (v4517(VarCurr,bitIndex4)<->v4699(VarCurr)).
% 297.52/295.75  all VarCurr (v4699(VarCurr)<->v4701(VarCurr)).
% 297.52/295.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4706(VarNext)-> (v4701(VarNext)<->v4701(VarCurr)))).
% 297.52/295.75  all VarNext (v4706(VarNext)-> (v4701(VarNext)<->v4644(VarNext))).
% 297.52/295.75  all VarCurr (v4706(VarCurr)<->v4648(VarCurr)&v4707(VarCurr)).
% 297.52/295.75  all VarCurr (-v4707(VarCurr)<->v4703(VarCurr)).
% 297.52/295.75  v4701(constB0)<->$F.
% 297.52/295.75  all VarCurr (v4703(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.52/295.75  all VarCurr (v4695(VarCurr)<->v771(VarCurr,bitIndex3)).
% 297.52/295.75  all VarCurr (v771(VarCurr,bitIndex3)<->v773(VarCurr,bitIndex3)).
% 297.52/295.75  all VarCurr (v773(VarCurr,bitIndex3)<->v4466(VarCurr,bitIndex3)).
% 297.52/295.75  all VarCurr (v781(VarCurr,bitIndex3)<->v4465(VarCurr,bitIndex3)).
% 297.52/295.75  all VarCurr (v4458(VarCurr,bitIndex3)<->v4460(VarCurr,bitIndex3)).
% 297.52/295.75  all VarCurr (v4460(VarCurr,bitIndex3)<->v4462(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v4462(VarCurr,bitIndex3)<->v4464(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v4464(VarCurr,bitIndex3)<->v1212(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v783(VarCurr,bitIndex3)<->v785(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v785(VarCurr,bitIndex3)<->v787(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v787(VarCurr,bitIndex3)<->v789(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v789(VarCurr,bitIndex3)<->v4455(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v791(VarCurr,bitIndex0)<->v793(VarCurr,bitIndex0)).
% 297.56/295.76  all VarCurr (v793(VarCurr,bitIndex0)<->v795(VarCurr,bitIndex0)).
% 297.56/295.76  all VarCurr (v795(VarCurr,bitIndex0)<->v4454(VarCurr,bitIndex0)).
% 297.56/295.76  all VarCurr (v775(VarCurr,bitIndex3)<->v777(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v777(VarCurr,bitIndex3)<->v779(VarCurr,bitIndex3)).
% 297.56/295.76  all VarCurr (v4693(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.56/295.76  all VarCurr (v4691(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.56/295.76  all VarCurr (v4642(VarCurr)<->v4644(VarCurr)).
% 297.56/295.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4668(VarNext)-> (v4644(VarNext)<->v4644(VarCurr)))).
% 297.56/295.76  all VarNext (v4668(VarNext)-> (v4644(VarNext)<->v4680(VarNext))).
% 297.56/295.76  all VarCurr (-v4669(VarCurr)-> (v4680(VarCurr)<->v4681(VarCurr))).
% 297.56/295.76  all VarCurr (v4669(VarCurr)-> (v4680(VarCurr)<->v4650(VarCurr))).
% 297.56/295.76  all VarCurr (-v4674(VarCurr)-> (v4681(VarCurr)<->v4652(VarCurr))).
% 297.56/295.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4674(VarCurr)-> (v4681(VarCurr)<->x166(VarNext)))).
% 297.56/295.76  all VarCurr (v4668(VarCurr)<->v4669(VarCurr)|v4672(VarCurr)).
% 297.56/295.76  all VarCurr (v4672(VarCurr)<->v4673(VarCurr)&v4679(VarCurr)).
% 297.56/295.76  all VarCurr (-v4679(VarCurr)<->v4669(VarCurr)).
% 297.56/295.76  all VarCurr (v4673(VarCurr)<->v4674(VarCurr)|v4676(VarCurr)).
% 297.56/295.76  all VarCurr (v4676(VarCurr)<->v4677(VarCurr)&v4678(VarCurr)).
% 297.56/295.76  all VarCurr (-v4678(VarCurr)<->v4674(VarCurr)).
% 297.56/295.76  all VarCurr (v4677(VarCurr)<->v4646(VarCurr)&v4648(VarCurr)).
% 297.56/295.76  all VarCurr (v4674(VarCurr)<->v4646(VarCurr)&v4675(VarCurr)).
% 297.56/295.76  all VarCurr (-v4675(VarCurr)<->v4648(VarCurr)).
% 297.56/295.76  all VarCurr (v4669(VarCurr)<->v4670(VarCurr)&v4671(VarCurr)).
% 297.56/295.76  all VarCurr (-v4671(VarCurr)<->v4648(VarCurr)).
% 297.56/295.76  all VarCurr (-v4670(VarCurr)<->v4646(VarCurr)).
% 297.56/295.76  v4644(constB0)<->$F.
% 297.56/295.76  all VarCurr (v4652(VarCurr)<->v4517(VarCurr,bitIndex5)).
% 297.56/295.76  all VarCurr (v4517(VarCurr,bitIndex5)<->v4654(VarCurr)).
% 297.56/295.76  all VarCurr (v4654(VarCurr)<->v4656(VarCurr)).
% 297.56/295.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4661(VarNext)-> (v4656(VarNext)<->v4656(VarCurr)))).
% 297.56/295.76  all VarNext (v4661(VarNext)-> (v4656(VarNext)<->v4599(VarNext))).
% 297.56/295.76  all VarCurr (v4661(VarCurr)<->v4603(VarCurr)&v4662(VarCurr)).
% 297.56/295.76  all VarCurr (-v4662(VarCurr)<->v4658(VarCurr)).
% 297.56/295.76  v4656(constB0)<->$F.
% 297.56/295.76  all VarCurr (v4658(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.56/295.76  all VarCurr (v4650(VarCurr)<->v771(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v771(VarCurr,bitIndex4)<->v773(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v773(VarCurr,bitIndex4)<->v4466(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v781(VarCurr,bitIndex4)<->v4465(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v4458(VarCurr,bitIndex4)<->v4460(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v4460(VarCurr,bitIndex4)<->v4462(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v4462(VarCurr,bitIndex4)<->v4464(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v4464(VarCurr,bitIndex4)<->v1212(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v783(VarCurr,bitIndex4)<->v785(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v785(VarCurr,bitIndex4)<->v787(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v787(VarCurr,bitIndex4)<->v789(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v789(VarCurr,bitIndex4)<->v4455(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v791(VarCurr,bitIndex1)<->v793(VarCurr,bitIndex1)).
% 297.56/295.76  all VarCurr (v793(VarCurr,bitIndex1)<->v795(VarCurr,bitIndex1)).
% 297.56/295.76  all VarCurr (v795(VarCurr,bitIndex1)<->v4454(VarCurr,bitIndex1)).
% 297.56/295.76  all VarCurr (v775(VarCurr,bitIndex4)<->v777(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v777(VarCurr,bitIndex4)<->v779(VarCurr,bitIndex4)).
% 297.56/295.76  all VarCurr (v4648(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.56/295.76  all VarCurr (v4646(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.56/295.76  all VarCurr (v4597(VarCurr)<->v4599(VarCurr)).
% 297.56/295.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4623(VarNext)-> (v4599(VarNext)<->v4599(VarCurr)))).
% 297.56/295.76  all VarNext (v4623(VarNext)-> (v4599(VarNext)<->v4635(VarNext))).
% 297.56/295.76  all VarCurr (-v4624(VarCurr)-> (v4635(VarCurr)<->v4636(VarCurr))).
% 297.56/295.76  all VarCurr (v4624(VarCurr)-> (v4635(VarCurr)<->v4605(VarCurr))).
% 297.56/295.77  all VarCurr (-v4629(VarCurr)-> (v4636(VarCurr)<->v4607(VarCurr))).
% 297.56/295.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4629(VarCurr)-> (v4636(VarCurr)<->x166(VarNext)))).
% 297.56/295.77  all VarCurr (v4623(VarCurr)<->v4624(VarCurr)|v4627(VarCurr)).
% 297.56/295.77  all VarCurr (v4627(VarCurr)<->v4628(VarCurr)&v4634(VarCurr)).
% 297.56/295.77  all VarCurr (-v4634(VarCurr)<->v4624(VarCurr)).
% 297.56/295.77  all VarCurr (v4628(VarCurr)<->v4629(VarCurr)|v4631(VarCurr)).
% 297.56/295.77  all VarCurr (v4631(VarCurr)<->v4632(VarCurr)&v4633(VarCurr)).
% 297.56/295.77  all VarCurr (-v4633(VarCurr)<->v4629(VarCurr)).
% 297.56/295.77  all VarCurr (v4632(VarCurr)<->v4601(VarCurr)&v4603(VarCurr)).
% 297.56/295.77  all VarCurr (v4629(VarCurr)<->v4601(VarCurr)&v4630(VarCurr)).
% 297.56/295.77  all VarCurr (-v4630(VarCurr)<->v4603(VarCurr)).
% 297.56/295.77  all VarCurr (v4624(VarCurr)<->v4625(VarCurr)&v4626(VarCurr)).
% 297.56/295.77  all VarCurr (-v4626(VarCurr)<->v4603(VarCurr)).
% 297.56/295.77  all VarCurr (-v4625(VarCurr)<->v4601(VarCurr)).
% 297.56/295.77  v4599(constB0)<->$F.
% 297.56/295.77  all VarCurr (v4607(VarCurr)<->v4517(VarCurr,bitIndex6)).
% 297.56/295.77  all VarCurr (v4517(VarCurr,bitIndex6)<->v4609(VarCurr)).
% 297.56/295.77  all VarCurr (v4609(VarCurr)<->v4611(VarCurr)).
% 297.56/295.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4616(VarNext)-> (v4611(VarNext)<->v4611(VarCurr)))).
% 297.56/295.77  all VarNext (v4616(VarNext)-> (v4611(VarNext)<->v4554(VarNext))).
% 297.56/295.77  all VarCurr (v4616(VarCurr)<->v4558(VarCurr)&v4617(VarCurr)).
% 297.56/295.77  all VarCurr (-v4617(VarCurr)<->v4613(VarCurr)).
% 297.56/295.77  v4611(constB0)<->$F.
% 297.56/295.77  all VarCurr (v4613(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.56/295.77  all VarCurr (v4605(VarCurr)<->v771(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v771(VarCurr,bitIndex5)<->v773(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v773(VarCurr,bitIndex5)<->v4466(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v781(VarCurr,bitIndex5)<->v4465(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v4458(VarCurr,bitIndex5)<->v4460(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v4460(VarCurr,bitIndex5)<->v4462(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v4462(VarCurr,bitIndex5)<->v4464(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v4464(VarCurr,bitIndex5)<->v1212(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v783(VarCurr,bitIndex5)<->v785(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v785(VarCurr,bitIndex5)<->v787(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v787(VarCurr,bitIndex5)<->v789(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v789(VarCurr,bitIndex5)<->v4455(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v791(VarCurr,bitIndex2)<->v793(VarCurr,bitIndex2)).
% 297.56/295.77  all VarCurr (v793(VarCurr,bitIndex2)<->v795(VarCurr,bitIndex2)).
% 297.56/295.77  all VarCurr (v795(VarCurr,bitIndex2)<->v4454(VarCurr,bitIndex2)).
% 297.56/295.77  all VarCurr (v775(VarCurr,bitIndex5)<->v777(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v777(VarCurr,bitIndex5)<->v779(VarCurr,bitIndex5)).
% 297.56/295.77  all VarCurr (v4603(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.56/295.77  all VarCurr (v4601(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.56/295.77  all VarCurr (v4552(VarCurr)<->v4554(VarCurr)).
% 297.56/295.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4578(VarNext)-> (v4554(VarNext)<->v4554(VarCurr)))).
% 297.56/295.77  all VarNext (v4578(VarNext)-> (v4554(VarNext)<->v4590(VarNext))).
% 297.56/295.77  all VarCurr (-v4579(VarCurr)-> (v4590(VarCurr)<->v4591(VarCurr))).
% 297.56/295.77  all VarCurr (v4579(VarCurr)-> (v4590(VarCurr)<->v4560(VarCurr))).
% 297.56/295.77  all VarCurr (-v4584(VarCurr)-> (v4591(VarCurr)<->v4562(VarCurr))).
% 297.56/295.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4584(VarCurr)-> (v4591(VarCurr)<->x166(VarNext)))).
% 297.56/295.77  all VarCurr (v4578(VarCurr)<->v4579(VarCurr)|v4582(VarCurr)).
% 297.56/295.77  all VarCurr (v4582(VarCurr)<->v4583(VarCurr)&v4589(VarCurr)).
% 297.56/295.77  all VarCurr (-v4589(VarCurr)<->v4579(VarCurr)).
% 297.56/295.77  all VarCurr (v4583(VarCurr)<->v4584(VarCurr)|v4586(VarCurr)).
% 297.56/295.77  all VarCurr (v4586(VarCurr)<->v4587(VarCurr)&v4588(VarCurr)).
% 297.56/295.77  all VarCurr (-v4588(VarCurr)<->v4584(VarCurr)).
% 297.56/295.77  all VarCurr (v4587(VarCurr)<->v4556(VarCurr)&v4558(VarCurr)).
% 297.56/295.77  all VarCurr (v4584(VarCurr)<->v4556(VarCurr)&v4585(VarCurr)).
% 297.56/295.77  all VarCurr (-v4585(VarCurr)<->v4558(VarCurr)).
% 297.56/295.77  all VarCurr (v4579(VarCurr)<->v4580(VarCurr)&v4581(VarCurr)).
% 297.56/295.77  all VarCurr (-v4581(VarCurr)<->v4558(VarCurr)).
% 297.56/295.77  all VarCurr (-v4580(VarCurr)<->v4556(VarCurr)).
% 297.56/295.77  v4554(constB0)<->$F.
% 297.56/295.77  all VarCurr (v4562(VarCurr)<->v4517(VarCurr,bitIndex7)).
% 297.56/295.77  all VarCurr (v4517(VarCurr,bitIndex7)<->v4564(VarCurr)).
% 297.56/295.77  all VarCurr (v4564(VarCurr)<->v4566(VarCurr)).
% 297.56/295.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4571(VarNext)-> (v4566(VarNext)<->v4566(VarCurr)))).
% 297.56/295.78  all VarNext (v4571(VarNext)-> (v4566(VarNext)<->v4507(VarNext))).
% 297.56/295.78  all VarCurr (v4571(VarCurr)<->v4511(VarCurr)&v4572(VarCurr)).
% 297.56/295.78  all VarCurr (-v4572(VarCurr)<->v4568(VarCurr)).
% 297.56/295.78  v4566(constB0)<->$F.
% 297.56/295.78  all VarCurr (v4568(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.56/295.78  all VarCurr (v4560(VarCurr)<->v771(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v771(VarCurr,bitIndex6)<->v773(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v773(VarCurr,bitIndex6)<->v4466(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v781(VarCurr,bitIndex6)<->v4465(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v4458(VarCurr,bitIndex6)<->v4460(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v4460(VarCurr,bitIndex6)<->v4462(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v4462(VarCurr,bitIndex6)<->v4464(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v4464(VarCurr,bitIndex6)<->v1212(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1212(VarCurr,bitIndex6)<->v1214(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1214(VarCurr,bitIndex6)<->v1216(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1216(VarCurr,bitIndex6)<->v1218(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1218(VarCurr,bitIndex6)<->v1220(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1220(VarCurr,bitIndex6)<->v1222(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1222(VarCurr,bitIndex6)<->v1224(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1224(VarCurr,bitIndex6)<->v1226(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1226(VarCurr,bitIndex6)<->v1228(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1228(VarCurr,bitIndex6)<->v1230(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1230(VarCurr,bitIndex6)<->v1232(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v1232(VarCurr,bitIndex6)<->v1234(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v783(VarCurr,bitIndex6)<->v785(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v785(VarCurr,bitIndex6)<->v787(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v787(VarCurr,bitIndex6)<->v789(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v789(VarCurr,bitIndex6)<->v4455(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v791(VarCurr,bitIndex3)<->v793(VarCurr,bitIndex3)).
% 297.56/295.78  all VarCurr (v793(VarCurr,bitIndex3)<->v795(VarCurr,bitIndex3)).
% 297.56/295.78  all VarCurr (v795(VarCurr,bitIndex3)<->v4454(VarCurr,bitIndex3)).
% 297.56/295.78  all VarCurr (v775(VarCurr,bitIndex6)<->v777(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v777(VarCurr,bitIndex6)<->v779(VarCurr,bitIndex6)).
% 297.56/295.78  all VarCurr (v4558(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.56/295.78  all VarCurr (v4556(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.56/295.78  all VarCurr (v4505(VarCurr)<->v4507(VarCurr)).
% 297.56/295.78  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4533(VarNext)-> (v4507(VarNext)<->v4507(VarCurr)))).
% 297.56/295.78  all VarNext (v4533(VarNext)-> (v4507(VarNext)<->v4545(VarNext))).
% 297.56/295.78  all VarCurr (-v4534(VarCurr)-> (v4545(VarCurr)<->v4546(VarCurr))).
% 297.56/295.78  all VarCurr (v4534(VarCurr)-> (v4545(VarCurr)<->v4513(VarCurr))).
% 297.56/295.78  all VarCurr (-v4539(VarCurr)-> (v4546(VarCurr)<->v4515(VarCurr))).
% 297.56/295.78  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4539(VarCurr)-> (v4546(VarCurr)<->x166(VarNext)))).
% 297.56/295.78  all VarCurr (v4533(VarCurr)<->v4534(VarCurr)|v4537(VarCurr)).
% 297.56/295.78  all VarCurr (v4537(VarCurr)<->v4538(VarCurr)&v4544(VarCurr)).
% 297.56/295.78  all VarCurr (-v4544(VarCurr)<->v4534(VarCurr)).
% 297.56/295.78  all VarCurr (v4538(VarCurr)<->v4539(VarCurr)|v4541(VarCurr)).
% 297.56/295.78  all VarCurr (v4541(VarCurr)<->v4542(VarCurr)&v4543(VarCurr)).
% 297.56/295.78  all VarCurr (-v4543(VarCurr)<->v4539(VarCurr)).
% 297.56/295.78  all VarCurr (v4542(VarCurr)<->v4509(VarCurr)&v4511(VarCurr)).
% 297.56/295.78  all VarCurr (v4539(VarCurr)<->v4509(VarCurr)&v4540(VarCurr)).
% 297.56/295.78  all VarCurr (-v4540(VarCurr)<->v4511(VarCurr)).
% 297.56/295.78  all VarCurr (v4534(VarCurr)<->v4535(VarCurr)&v4536(VarCurr)).
% 297.56/295.78  all VarCurr (-v4536(VarCurr)<->v4511(VarCurr)).
% 297.56/295.78  all VarCurr (-v4535(VarCurr)<->v4509(VarCurr)).
% 297.56/295.78  v4507(constB0)<->$F.
% 297.56/295.78  all VarCurr (v4515(VarCurr)<->v4517(VarCurr,bitIndex8)).
% 297.56/295.78  all VarCurr (v4517(VarCurr,bitIndex8)<->v4519(VarCurr)).
% 297.56/295.78  all VarCurr (v4519(VarCurr)<->v4521(VarCurr)).
% 297.56/295.78  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4526(VarNext)-> (v4521(VarNext)<->v4521(VarCurr)))).
% 297.56/295.78  all VarNext (v4526(VarNext)-> (v4521(VarNext)<->v763(VarNext))).
% 297.56/295.78  all VarCurr (v4526(VarCurr)<->v767(VarCurr)&v4527(VarCurr)).
% 297.56/295.78  all VarCurr (-v4527(VarCurr)<->v4523(VarCurr)).
% 297.56/295.78  v4521(constB0)<->$F.
% 297.56/295.78  all VarCurr (v4523(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.56/295.78  all VarCurr (v4513(VarCurr)<->v771(VarCurr,bitIndex7)).
% 297.56/295.78  all VarCurr (v771(VarCurr,bitIndex7)<->v773(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v773(VarCurr,bitIndex7)<->v4466(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v781(VarCurr,bitIndex7)<->v4465(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v4458(VarCurr,bitIndex7)<->v4460(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v4460(VarCurr,bitIndex7)<->v4462(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v4462(VarCurr,bitIndex7)<->v4464(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v4464(VarCurr,bitIndex7)<->v1212(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1212(VarCurr,bitIndex7)<->v1214(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1214(VarCurr,bitIndex7)<->v1216(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1216(VarCurr,bitIndex7)<->v1218(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1218(VarCurr,bitIndex7)<->v1220(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1220(VarCurr,bitIndex7)<->v1222(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1222(VarCurr,bitIndex7)<->v1224(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1224(VarCurr,bitIndex7)<->v1226(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1226(VarCurr,bitIndex7)<->v1228(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1228(VarCurr,bitIndex7)<->v1230(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1230(VarCurr,bitIndex7)<->v1232(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v1232(VarCurr,bitIndex7)<->v1234(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v783(VarCurr,bitIndex7)<->v785(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v785(VarCurr,bitIndex7)<->v787(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v787(VarCurr,bitIndex7)<->v789(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v789(VarCurr,bitIndex7)<->v4455(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v791(VarCurr,bitIndex4)<->v793(VarCurr,bitIndex4)).
% 297.56/295.80  all VarCurr (v793(VarCurr,bitIndex4)<->v795(VarCurr,bitIndex4)).
% 297.56/295.80  all VarCurr (v795(VarCurr,bitIndex4)<->v4454(VarCurr,bitIndex4)).
% 297.56/295.80  all VarCurr (v775(VarCurr,bitIndex7)<->v777(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v777(VarCurr,bitIndex7)<->v779(VarCurr,bitIndex7)).
% 297.56/295.80  all VarCurr (v4511(VarCurr)<->v103(VarCurr,bitIndex0)).
% 297.56/295.80  all VarCurr (v4509(VarCurr)<->v85(VarCurr,bitIndex0)).
% 297.56/295.80  all VarCurr (v761(VarCurr)<->v763(VarCurr)).
% 297.56/295.80  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4486(VarNext)-> (v763(VarNext)<->v763(VarCurr)))).
% 297.56/295.80  all VarNext (v4486(VarNext)-> (v763(VarNext)<->v4498(VarNext))).
% 297.56/295.80  all VarCurr (-v4487(VarCurr)-> (v4498(VarCurr)<->v4499(VarCurr))).
% 297.56/295.80  all VarCurr (v4487(VarCurr)-> (v4498(VarCurr)<->v769(VarCurr))).
% 297.56/295.80  all VarCurr (-v4492(VarCurr)-> (v4499(VarCurr)<->v4468(VarCurr))).
% 297.56/295.80  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4492(VarCurr)-> (v4499(VarCurr)<->x166(VarNext)))).
% 297.56/295.80  all VarCurr (v4486(VarCurr)<->v4487(VarCurr)|v4490(VarCurr)).
% 297.56/295.80  all VarCurr (v4490(VarCurr)<->v4491(VarCurr)&v4497(VarCurr)).
% 297.56/295.80  all VarCurr (-v4497(VarCurr)<->v4487(VarCurr)).
% 297.56/295.80  all VarCurr (v4491(VarCurr)<->v4492(VarCurr)|v4494(VarCurr)).
% 297.56/295.80  all VarCurr (v4494(VarCurr)<->v4495(VarCurr)&v4496(VarCurr)).
% 297.56/295.80  all VarCurr (-v4496(VarCurr)<->v4492(VarCurr)).
% 297.56/295.80  all VarCurr (v4495(VarCurr)<->v765(VarCurr)&v767(VarCurr)).
% 297.56/295.80  all VarCurr (v4492(VarCurr)<->v765(VarCurr)&v4493(VarCurr)).
% 297.56/295.80  all VarCurr (-v4493(VarCurr)<->v767(VarCurr)).
% 297.56/295.80  all VarCurr (v4487(VarCurr)<->v4488(VarCurr)&v4489(VarCurr)).
% 297.56/295.80  all VarCurr (-v4489(VarCurr)<->v767(VarCurr)).
% 297.56/295.80  all VarCurr (-v4488(VarCurr)<->v765(VarCurr)).
% 297.56/295.80  v763(constB0)<->$F.
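%
% The v4486..v4499 axioms above decode the pair (v765, v767) into three
% mutually exclusive load cases for the register v763, with a hold in the
% remaining case. A minimal Python sketch of the same next-state function
% (names reused from the axioms; x166_next stands for the next-state value
% of the free input x166):
%
%   def next_v763(v763_cur, v765, v767, v769, v4468, x166_next):
%       if not v765 and not v767:   # v4487: load v769
%           return v769
%       if v765 and not v767:       # v4492: load the external input x166
%           return x166_next
%       if v765 and v767:           # v4495 (via v4494/v4490): load v4468
%           return v4468
%       return v763_cur             # enable v4486 false: hold
%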
% 297.56/295.80  all VarCurr (v4468(VarCurr)<->v4470(VarCurr)).
% 297.56/295.80  all VarCurr (v4470(VarCurr)<->v4472(VarCurr)).
% 297.56/295.80  all VarCurr (v4472(VarCurr)<->v4474(VarCurr)).
% 297.56/295.80  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4479(VarNext)-> (v4474(VarNext)<->v4474(VarCurr)))).
% 297.56/295.80  all VarNext (v4479(VarNext)-> (v4474(VarNext)<->v81(VarNext))).
% 297.56/295.80  all VarCurr (v4479(VarCurr)<->v101(VarCurr)&v4480(VarCurr)).
% 297.56/295.80  all VarCurr (-v4480(VarCurr)<->v4476(VarCurr)).
% 297.56/295.80  v4474(constB0)<->$F.
% 297.56/295.80  all VarCurr (v4476(VarCurr)<->v184(VarCurr,bitIndex0)).
% 297.56/295.80  all VarCurr (v769(VarCurr)<->v771(VarCurr,bitIndex8)).
% 297.56/295.80  all VarCurr (v771(VarCurr,bitIndex8)<->v773(VarCurr,bitIndex8)).
% 297.56/295.80  all VarCurr (v773(VarCurr,bitIndex8)<->v4466(VarCurr,bitIndex8)).
% 297.56/295.80  all VarCurr (-v154(VarCurr)-> (all B (range_8_0(B)-> (v4466(VarCurr,B)<->v781(VarCurr,B))))).
% 297.56/295.80  all VarCurr (v154(VarCurr)-> (all B (range_8_0(B)-> (v4466(VarCurr,B)<->v775(VarCurr,B))))).
% 297.56/295.80  all VarCurr (v781(VarCurr,bitIndex8)<->v4465(VarCurr,bitIndex8)).
% 297.56/295.80  all VarCurr (-v214(VarCurr)-> (all B (range_8_0(B)-> (v4465(VarCurr,B)<->v4458(VarCurr,B))))).
% 297.56/295.80  all VarCurr (v214(VarCurr)-> (all B (range_8_0(B)-> (v4465(VarCurr,B)<->v783(VarCurr,B))))).
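%
% The two guarded axiom pairs above (for v4466 and v4465) are the standard
% one-implication-per-select-value encoding of a 2-to-1 bus multiplexer over
% range_8_0. A short Python sketch, assuming each nine-bit bus is a list of
% booleans indexed by bit position (names reused for illustration only):
%
%   def mux9(sel, on_true, on_false):
%       # bitwise select over bits 0..8 (range_8_0)
%       return [on_true[i] if sel else on_false[i] for i in range(9)]
%
%   v775 = [False] * 9; v781 = [True] * 9   # placeholder bus values
%   v4466 = mux9(True, v775, v781)          # v154 true selects v775, else v781
%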
% 297.56/295.82  all VarCurr (v4458(VarCurr,bitIndex8)<->v4460(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v4460(VarCurr,bitIndex8)<->v4462(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v4462(VarCurr,bitIndex8)<->v4464(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v4464(VarCurr,bitIndex8)<->v1212(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1212(VarCurr,bitIndex8)<->v1214(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1214(VarCurr,bitIndex8)<->v1216(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1216(VarCurr,bitIndex8)<->v1218(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1218(VarCurr,bitIndex8)<->v1220(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1220(VarCurr,bitIndex8)<->v1222(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1222(VarCurr,bitIndex8)<->v1224(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1224(VarCurr,bitIndex8)<->v1226(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1226(VarCurr,bitIndex8)<->v1228(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1228(VarCurr,bitIndex8)<->v1230(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1230(VarCurr,bitIndex8)<->v1232(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v1232(VarCurr,bitIndex8)<->v1234(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v783(VarCurr,bitIndex8)<->v785(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v785(VarCurr,bitIndex8)<->v787(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v787(VarCurr,bitIndex8)<->v789(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr (v789(VarCurr,bitIndex8)<->v4455(VarCurr,bitIndex8)).
% 297.56/295.82  all VarCurr B (range_2_0(B)-> (v4455(VarCurr,B)<->v4456(VarCurr,B))).
% 297.56/295.82  all VarCurr ((v4455(VarCurr,bitIndex8)<->v791(VarCurr,bitIndex5))& (v4455(VarCurr,bitIndex7)<->v791(VarCurr,bitIndex4))& (v4455(VarCurr,bitIndex6)<->v791(VarCurr,bitIndex3))& (v4455(VarCurr,bitIndex5)<->v791(VarCurr,bitIndex2))& (v4455(VarCurr,bitIndex4)<->v791(VarCurr,bitIndex1))& (v4455(VarCurr,bitIndex3)<->v791(VarCurr,bitIndex0))).
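%
% Together with the range_2_0 axiom just before it, the conjunction above
% simply concatenates buses: bits 2..0 of v4455 come from v4456 and bits 8..3
% come from v791[5:0], i.e. v791 placed three positions up. A one-line Python
% sketch over boolean lists (placeholder values; names reused for
% illustration only):
%
%   v4456 = [False] * 3; v791 = [True] * 6  # placeholder bus values
%   v4455 = v4456[0:3] + v791[0:6]          # v4455[i] = v4456[i] for i<3, else v791[i-3]
%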
% 297.56/295.82  all VarCurr (v791(VarCurr,bitIndex5)<->v793(VarCurr,bitIndex5)).
% 297.56/295.82  all VarCurr (v793(VarCurr,bitIndex5)<->v795(VarCurr,bitIndex5)).
% 297.56/295.82  all VarCurr (v795(VarCurr,bitIndex5)<->v4454(VarCurr,bitIndex5)).
% 297.56/295.82  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)& -v797(VarCurr,bitIndex8)& -v797(VarCurr,bitIndex7)& -v797(VarCurr,bitIndex6)& -v797(VarCurr,bitIndex5)& -v797(VarCurr,bitIndex4)& -v797(VarCurr,bitIndex3)& -v797(VarCurr,bitIndex2)& -v797(VarCurr,bitIndex1)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->$F)))).
% 297.56/295.82  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)& -v797(VarCurr,bitIndex8)& -v797(VarCurr,bitIndex7)& -v797(VarCurr,bitIndex6)& -v797(VarCurr,bitIndex5)& -v797(VarCurr,bitIndex4)& -v797(VarCurr,bitIndex3)& -v797(VarCurr,bitIndex2)&v797(VarCurr,bitIndex1)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b000001(B))))).
% 297.56/295.84  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)& -v797(VarCurr,bitIndex8)& -v797(VarCurr,bitIndex7)& -v797(VarCurr,bitIndex6)& -v797(VarCurr,bitIndex5)& -v797(VarCurr,bitIndex4)& -v797(VarCurr,bitIndex3)&v797(VarCurr,bitIndex2)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b000010(B))))).
% 297.56/295.84  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)& -v797(VarCurr,bitIndex8)& -v797(VarCurr,bitIndex7)& -v797(VarCurr,bitIndex6)& -v797(VarCurr,bitIndex5)& -v797(VarCurr,bitIndex4)&v797(VarCurr,bitIndex3)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b000011(B))))).
% 297.65/295.86  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)& -v797(VarCurr,bitIndex8)& -v797(VarCurr,bitIndex7)& -v797(VarCurr,bitIndex6)& -v797(VarCurr,bitIndex5)&v797(VarCurr,bitIndex4)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b000100(B))))).
% 297.65/295.86  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)& -v797(VarCurr,bitIndex8)& -v797(VarCurr,bitIndex7)& -v797(VarCurr,bitIndex6)&v797(VarCurr,bitIndex5)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b000101(B))))).
% 297.65/295.86  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)& -v797(VarCurr,bitIndex8)& -v797(VarCurr,bitIndex7)&v797(VarCurr,bitIndex6)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b000110(B))))).
% 297.69/295.88  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)& -v797(VarCurr,bitIndex8)&v797(VarCurr,bitIndex7)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b000111(B))))).
% 297.69/295.88  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)& -v797(VarCurr,bitIndex9)&v797(VarCurr,bitIndex8)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b001000(B))))).
% 297.69/295.90  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)& -v797(VarCurr,bitIndex10)&v797(VarCurr,bitIndex9)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b001001(B))))).
% 297.69/295.90  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)& -v797(VarCurr,bitIndex11)&v797(VarCurr,bitIndex10)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b001010(B))))).
% 297.69/295.90  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)& -v797(VarCurr,bitIndex12)&v797(VarCurr,bitIndex11)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b001011(B))))).
% 297.69/295.92  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)& -v797(VarCurr,bitIndex13)&v797(VarCurr,bitIndex12)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b001100(B))))).
% 297.69/295.92  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)& -v797(VarCurr,bitIndex14)&v797(VarCurr,bitIndex13)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b001101(B))))).
% 297.74/295.95  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)& -v797(VarCurr,bitIndex15)&v797(VarCurr,bitIndex14)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b001110(B))))).
% 297.74/295.95  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)& -v797(VarCurr,bitIndex16)&v797(VarCurr,bitIndex15)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b001111(B))))).
% 297.74/295.95  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)& -v797(VarCurr,bitIndex17)&v797(VarCurr,bitIndex16)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b010000(B))))).
% 297.76/295.97  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)& -v797(VarCurr,bitIndex18)&v797(VarCurr,bitIndex17)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b010001(B))))).
% 297.76/295.97  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)& -v797(VarCurr,bitIndex19)&v797(VarCurr,bitIndex18)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b010010(B))))).
% 297.76/295.97  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)& -v797(VarCurr,bitIndex20)&v797(VarCurr,bitIndex19)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b010011(B))))).
% 297.76/295.97  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)& -v797(VarCurr,bitIndex21)&v797(VarCurr,bitIndex20)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b010100(B))))).
% 297.76/295.99  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)& -v797(VarCurr,bitIndex22)&v797(VarCurr,bitIndex21)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b010101(B))))).
% 297.76/295.99  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)& -v797(VarCurr,bitIndex23)&v797(VarCurr,bitIndex22)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b010110(B))))).
% 297.76/295.99  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)& -v797(VarCurr,bitIndex24)&v797(VarCurr,bitIndex23)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b010111(B))))).
% 297.76/296.01  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)& -v797(VarCurr,bitIndex25)&v797(VarCurr,bitIndex24)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b011000(B))))).
% 297.76/296.01  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)& -v797(VarCurr,bitIndex26)&v797(VarCurr,bitIndex25)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b011001(B))))).
% 297.76/296.01  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)& -v797(VarCurr,bitIndex27)&v797(VarCurr,bitIndex26)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b011010(B))))).
% 297.83/296.03  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)& -v797(VarCurr,bitIndex28)&v797(VarCurr,bitIndex27)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b011011(B))))).
% 297.83/296.03  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)& -v797(VarCurr,bitIndex29)&v797(VarCurr,bitIndex28)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b011100(B))))).
% 297.83/296.03  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)& -v797(VarCurr,bitIndex30)&v797(VarCurr,bitIndex29)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b011101(B))))).
% 297.83/296.03  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)& -v797(VarCurr,bitIndex31)&v797(VarCurr,bitIndex30)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b011110(B))))).
% 297.83/296.05  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)& -v797(VarCurr,bitIndex32)&v797(VarCurr,bitIndex31)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b011111(B))))).
% 297.83/296.05  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)& -v797(VarCurr,bitIndex33)&v797(VarCurr,bitIndex32)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b100000(B))))).
% 297.83/296.05  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)& -v797(VarCurr,bitIndex34)&v797(VarCurr,bitIndex33)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b100001(B))))).
% 297.83/296.05  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)& -v797(VarCurr,bitIndex35)&v797(VarCurr,bitIndex34)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b100010(B))))).
% 297.83/296.05  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)& -v797(VarCurr,bitIndex36)&v797(VarCurr,bitIndex35)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b100011(B))))).
% 297.87/296.07  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)& -v797(VarCurr,bitIndex37)&v797(VarCurr,bitIndex36)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b100100(B))))).
% 297.87/296.07  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)& -v797(VarCurr,bitIndex38)&v797(VarCurr,bitIndex37)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b100101(B))))).
% 297.87/296.07  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)& -v797(VarCurr,bitIndex39)&v797(VarCurr,bitIndex38)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b100110(B))))).
% 297.87/296.07  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)& -v797(VarCurr,bitIndex40)&v797(VarCurr,bitIndex39)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b100111(B))))).
% 297.87/296.07  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)& -v797(VarCurr,bitIndex41)&v797(VarCurr,bitIndex40)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b101000(B))))).
% 297.87/296.09  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)& -v797(VarCurr,bitIndex42)&v797(VarCurr,bitIndex41)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b101001(B))))).
% 297.87/296.09  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)& -v797(VarCurr,bitIndex43)&v797(VarCurr,bitIndex42)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b101010(B))))).
% 297.87/296.09  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)& -v797(VarCurr,bitIndex44)&v797(VarCurr,bitIndex43)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b101011(B))))).
% 297.87/296.09  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)& -v797(VarCurr,bitIndex45)&v797(VarCurr,bitIndex44)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b101100(B))))).
% 297.87/296.09  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)& -v797(VarCurr,bitIndex46)&v797(VarCurr,bitIndex45)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b101101(B))))).
% 297.87/296.09  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)& -v797(VarCurr,bitIndex47)&v797(VarCurr,bitIndex46)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b101110(B))))).
% 297.87/296.09  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)& -v797(VarCurr,bitIndex48)&v797(VarCurr,bitIndex47)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b101111(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)& -v797(VarCurr,bitIndex49)&v797(VarCurr,bitIndex48)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b110000(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)& -v797(VarCurr,bitIndex50)&v797(VarCurr,bitIndex49)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b110001(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)& -v797(VarCurr,bitIndex51)&v797(VarCurr,bitIndex50)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b110010(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)& -v797(VarCurr,bitIndex52)&v797(VarCurr,bitIndex51)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b110011(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)& -v797(VarCurr,bitIndex53)&v797(VarCurr,bitIndex52)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b110100(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)& -v797(VarCurr,bitIndex54)&v797(VarCurr,bitIndex53)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b110101(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)& -v797(VarCurr,bitIndex55)&v797(VarCurr,bitIndex54)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b110110(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)& -v797(VarCurr,bitIndex56)&v797(VarCurr,bitIndex55)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b110111(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)& -v797(VarCurr,bitIndex57)&v797(VarCurr,bitIndex56)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b111000(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)& -v797(VarCurr,bitIndex58)&v797(VarCurr,bitIndex57)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b111001(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)& -v797(VarCurr,bitIndex59)&v797(VarCurr,bitIndex58)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b111010(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)& -v797(VarCurr,bitIndex60)&v797(VarCurr,bitIndex59)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b111011(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)& -v797(VarCurr,bitIndex61)&v797(VarCurr,bitIndex60)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b111100(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)& -v797(VarCurr,bitIndex62)&v797(VarCurr,bitIndex61)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b111101(B))))).
% 297.87/296.10  all VarCurr (-v797(VarCurr,bitIndex63)&v797(VarCurr,bitIndex62)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->b111110(B))))).
% 297.87/296.10  all VarCurr (v797(VarCurr,bitIndex63)-> (all B (range_5_0(B)-> (v4454(VarCurr,B)<->$T)))).
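[Editorial note, not part of the prover output.] The axioms up to this point form the tail of a 64-way priority case split: scanning v797 from bitIndex63 downward, the first set bit at position k forces the 6-bit word v4454 to the binary encoding of k (bit 35 gives b100011 = 35, bit 36 gives b100100 = 36, ..., bit 62 gives b111110 = 62, and bit 63 gives the all-true word, 63). In other words, v4454 is the index of the most significant set bit of v797. A minimal Python sketch of that semantics follows; the signal names are the source's own, the function name is illustrative, and the all-zero case is covered by axioms outside this excerpt.

    def highest_set_bit_index(v797: int, width: int = 64) -> int:
        """Model of the v4454 case split: scan from the MSB down and
        return the index of the first set bit as a 6-bit value
        (bit 63 -> 111111, else bit 62 -> 111110, and so on)."""
        for k in range(width - 1, -1, -1):
            if (v797 >> k) & 1:
                return k          # e.g. k = 35 encodes as b100011
        return 0                  # assumption: all-zero handled elsewhere

For instance, highest_set_bit_index(1 << 36) returns 36, matching the b100100 axiom above for the case where bitIndex36 is the highest set bit.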
% 297.87/296.10  all VarCurr (v797(VarCurr,bitIndex1)<->v799(VarCurr,bitIndex1)).
% 297.87/296.10  all VarCurr (v799(VarCurr,bitIndex1)<->v2593(VarCurr,bitIndex1)).
% 297.87/296.10  all VarCurr (v801(VarCurr,bitIndex1)<->v2592(VarCurr,bitIndex1)).
% 297.87/296.10  all VarCurr (v2381(VarCurr,bitIndex1)<->v2383(VarCurr,bitIndex1)).
% 297.87/296.10  all VarCurr (v2383(VarCurr,bitIndex1)<->v2385(VarCurr,bitIndex1)).
% 297.87/296.10  all VarCurr (v2385(VarCurr,bitIndex1)<->v2387(VarCurr,bitIndex1)).
% 297.87/296.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4446(VarNext)-> (v2387(VarNext,bitIndex1)<->v2387(VarCurr,bitIndex1)))).
% 297.87/296.10  all VarNext (v4446(VarNext)-> (v2387(VarNext,bitIndex1)<->v4448(VarNext))).
% 297.87/296.10  all VarCurr (v4448(VarCurr)<->v2389(VarCurr)&v4449(VarCurr)).
% 297.87/296.10  all VarCurr (v4449(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex2078))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex2077))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex2076))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex2075))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex2074))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex2073))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex2072))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex2071))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex2070))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex2069))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex2068))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex2067))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex2066))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex2065))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex2064))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex2063))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex2062))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex2061))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex2060))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex2059))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex2058))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex2057))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex2056))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex2055))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex2054))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex2053))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex2052))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex2051))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex2050))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex2049))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex2048))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex2047))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex2046))).
% 297.87/296.10  all VarCurr (v4446(VarCurr)<->v4447(VarCurr)&v2585(VarCurr)).
% 297.87/296.10  all VarCurr (-v4447(VarCurr)<->v2547(VarCurr)).
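[Editorial note, not part of the prover output.] The five axioms above describe a one-bit register with a write enable: v2387[1] holds its value across a nextState step unless v4446 fires, in which case it takes v4448 = v2389 AND v4449, where v4449 is a 33-bit bitwise equality between v2409[32:0] and the window v2545[2078:2046]. The enable itself is v4446 = (NOT v2547) AND v2585. A sketch of one step, using the source's own signal names with lists of booleans as an illustrative stand-in for the predicates:

    def step_v2387_bit1(v2387_bit1_curr: bool,
                        v2547: bool, v2585: bool, v2389: bool,
                        v2409: list, v2545: list) -> bool:
        """Next-state value of v2387[1] per the axioms above; the
        enable and its inputs are read in the state where the update
        fires, as the nextState guards indicate."""
        v4447 = not v2547              # -v4447 <-> v2547
        v4446 = v4447 and v2585        # write enable
        if not v4446:
            return v2387_bit1_curr     # hold the current value
        # v4449: compare v2409[32:0] against the slice v2545[2078:2046]
        v4449 = all(v2409[i] == v2545[2046 + i] for i in range(33))
        return v2389 and v4449         # v4448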
% 297.87/296.10  all VarNext (v803(VarNext,bitIndex1)<->v4437(VarNext,bitIndex1)).
% 297.87/296.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4439(VarNext)-> (all B (range_63_0(B)-> (v4437(VarNext,B)<->v803(VarCurr,B)))))).
% 297.87/296.10  all VarNext (v4439(VarNext)-> (all B (range_63_0(B)-> (v4437(VarNext,B)<->v2377(VarNext,B))))).
% 297.87/296.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4439(VarNext)<->v4440(VarNext)&v2370(VarNext))).
% 297.87/296.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4440(VarNext)<->v4442(VarNext)&v2343(VarNext))).
% 297.87/296.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4442(VarNext)<->v2350(VarNext))).
% 297.87/296.11  all VarNext (v959(VarNext,bitIndex1)<->v4429(VarNext,bitIndex1)).
% 297.87/296.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4430(VarNext)-> (all B (range_63_0(B)-> (v4429(VarNext,B)<->v959(VarCurr,B)))))).
% 297.87/296.11  all VarNext (v4430(VarNext)-> (all B (range_63_0(B)-> (v4429(VarNext,B)<->v2356(VarNext,B))))).
% 297.87/296.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4430(VarNext)<->v4431(VarNext))).
% 297.87/296.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4431(VarNext)<->v4433(VarNext)&v2343(VarNext))).
% 297.87/296.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4433(VarNext)<->v2350(VarNext))).
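[Editorial note, not part of the prover output.] The two blocks just above are 64-bit latch chains: v803 copies the whole word v2377 when an enable built from v2350, v2343 and v2370 fires, and holds otherwise; v959 does the same from v2356, with an enable that omits the v2370 conjunct (v4430 <-> v4431, v4431 = v4433 AND v2343, v4433 = NOT v2350). A compact sketch of the v803 rule, again with the source's names:

    def step_v803(v803_curr: list, v2377: list,
                  v2350: bool, v2343: bool, v2370: bool) -> list:
        """Next-state rule for the 64-bit word v803 (range_63_0):
        v4442 = not v2350; v4440 = v4442 and v2343;
        v4439 = v4440 and v2370; copy v2377 on v4439, else hold."""
        v4439 = (not v2350) and v2343 and v2370
        return list(v2377) if v4439 else list(v803_curr)

The v959 step is identical in shape with enable (not v2350) and v2343.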
% 297.87/296.11  all VarCurr (v961(VarCurr,bitIndex1)<->v2339(VarCurr,bitIndex1)).
% 297.87/296.11  all VarCurr (-v4427(VarCurr)-> (v2327(VarCurr,bitIndex1)<->$F)).
% 297.87/296.11  all VarCurr (v4427(VarCurr)-> (v2327(VarCurr,bitIndex1)<->v2329(VarCurr))).
% 297.87/296.11  all VarCurr (v4427(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.87/296.11  all VarCurr (v963(VarCurr,bitIndex1)<->v2325(VarCurr,bitIndex1)).
% 297.87/296.11  all VarCurr (-v4425(VarCurr)-> (v965(VarCurr,bitIndex1)<->$F)).
% 297.87/296.11  all VarCurr (v4425(VarCurr)-> (v965(VarCurr,bitIndex1)<->v2201(VarCurr))).
% 297.87/296.11  all VarCurr (v4425(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
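[Editorial note, not part of the prover output.] The four axioms above are a demultiplexer stage: v4427 (and identically v4425) tests the 6-bit selector v967 against the constant 000001, and when it holds, output bit 1 of v2327 carries v2329 (respectively, bit 1 of v965 carries v2201); otherwise the bit is driven false. The same template recurs below for bitIndex2 through bitIndex10, with the selector constant counting up (000010 for bit 2, 000011 for bit 3, and so on), so no further examples are given for those repeats. A one-line sketch of the shared shape, with an integer selector as an illustrative simplification:

    def demux_bit(v967: int, k: int, data: bool) -> bool:
        """Output bit k carries the data signal (v2329 or v2201)
        exactly when the 6-bit selector v967 equals k."""
        return data if v967 == k else False

For the axioms above, demux_bit(v967, 1, v2329) models v2327[1]; the bitIndex2 block that follows corresponds to demux_bit(v967, 2, v2329).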
% 297.87/296.11  all VarCurr (v797(VarCurr,bitIndex2)<->v799(VarCurr,bitIndex2)).
% 297.87/296.11  all VarCurr (v799(VarCurr,bitIndex2)<->v2593(VarCurr,bitIndex2)).
% 297.87/296.11  all VarCurr (v801(VarCurr,bitIndex2)<->v2592(VarCurr,bitIndex2)).
% 297.87/296.11  all VarCurr (v2381(VarCurr,bitIndex2)<->v2383(VarCurr,bitIndex2)).
% 297.87/296.11  all VarCurr (v2383(VarCurr,bitIndex2)<->v2385(VarCurr,bitIndex2)).
% 297.87/296.11  all VarCurr (v2385(VarCurr,bitIndex2)<->v2387(VarCurr,bitIndex2)).
% 297.87/296.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4416(VarNext)-> (v2387(VarNext,bitIndex2)<->v2387(VarCurr,bitIndex2)))).
% 297.87/296.11  all VarNext (v4416(VarNext)-> (v2387(VarNext,bitIndex2)<->v4418(VarNext))).
% 297.87/296.11  all VarCurr (v4418(VarCurr)<->v2389(VarCurr)&v4419(VarCurr)).
% 297.87/296.11  all VarCurr (v4419(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex2045))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex2044))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex2043))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex2042))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex2041))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex2040))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex2039))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex2038))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex2037))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex2036))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex2035))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex2034))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex2033))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex2032))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex2031))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex2030))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex2029))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex2028))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex2027))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex2026))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex2025))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex2024))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex2023))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex2022))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex2021))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex2020))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex2019))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex2018))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex2017))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex2016))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex2015))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex2014))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex2013))).
% 297.87/296.12  all VarCurr (v4416(VarCurr)<->v4417(VarCurr)&v2585(VarCurr)).
% 297.87/296.12  all VarCurr (-v4417(VarCurr)<->v2547(VarCurr)).
% 297.87/296.12  all VarNext (v803(VarNext,bitIndex2)<->v4407(VarNext,bitIndex2)).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4409(VarNext)-> (all B (range_63_0(B)-> (v4407(VarNext,B)<->v803(VarCurr,B)))))).
% 297.87/296.12  all VarNext (v4409(VarNext)-> (all B (range_63_0(B)-> (v4407(VarNext,B)<->v2377(VarNext,B))))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4409(VarNext)<->v4410(VarNext)&v2370(VarNext))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4410(VarNext)<->v4412(VarNext)&v2343(VarNext))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4412(VarNext)<->v2350(VarNext))).
% 297.87/296.12  all VarNext (v959(VarNext,bitIndex2)<->v4399(VarNext,bitIndex2)).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4400(VarNext)-> (all B (range_63_0(B)-> (v4399(VarNext,B)<->v959(VarCurr,B)))))).
% 297.87/296.12  all VarNext (v4400(VarNext)-> (all B (range_63_0(B)-> (v4399(VarNext,B)<->v2356(VarNext,B))))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4400(VarNext)<->v4401(VarNext))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4401(VarNext)<->v4403(VarNext)&v2343(VarNext))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4403(VarNext)<->v2350(VarNext))).
% 297.87/296.12  all VarCurr (v961(VarCurr,bitIndex2)<->v2339(VarCurr,bitIndex2)).
% 297.87/296.12  all VarCurr (-v4397(VarCurr)-> (v2327(VarCurr,bitIndex2)<->$F)).
% 297.87/296.12  all VarCurr (v4397(VarCurr)-> (v2327(VarCurr,bitIndex2)<->v2329(VarCurr))).
% 297.87/296.12  all VarCurr (v4397(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.87/296.12  all VarCurr (v963(VarCurr,bitIndex2)<->v2325(VarCurr,bitIndex2)).
% 297.87/296.12  all VarCurr (-v4395(VarCurr)-> (v965(VarCurr,bitIndex2)<->$F)).
% 297.87/296.12  all VarCurr (v4395(VarCurr)-> (v965(VarCurr,bitIndex2)<->v2201(VarCurr))).
% 297.87/296.12  all VarCurr (v4395(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.87/296.12  all VarCurr (v797(VarCurr,bitIndex3)<->v799(VarCurr,bitIndex3)).
% 297.87/296.12  all VarCurr (v799(VarCurr,bitIndex3)<->v2593(VarCurr,bitIndex3)).
% 297.87/296.12  all VarCurr (v801(VarCurr,bitIndex3)<->v2592(VarCurr,bitIndex3)).
% 297.87/296.12  all VarCurr (v2381(VarCurr,bitIndex3)<->v2383(VarCurr,bitIndex3)).
% 297.87/296.12  all VarCurr (v2383(VarCurr,bitIndex3)<->v2385(VarCurr,bitIndex3)).
% 297.87/296.12  all VarCurr (v2385(VarCurr,bitIndex3)<->v2387(VarCurr,bitIndex3)).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4386(VarNext)-> (v2387(VarNext,bitIndex3)<->v2387(VarCurr,bitIndex3)))).
% 297.87/296.12  all VarNext (v4386(VarNext)-> (v2387(VarNext,bitIndex3)<->v4388(VarNext))).
% 297.87/296.12  all VarCurr (v4388(VarCurr)<->v2389(VarCurr)&v4389(VarCurr)).
% 297.87/296.12  all VarCurr (v4389(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex2012))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex2011))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex2010))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex2009))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex2008))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex2007))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex2006))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex2005))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex2004))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex2003))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex2002))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex2001))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex2000))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1999))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1998))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1997))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1996))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1995))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1994))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1993))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1992))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1991))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1990))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1989))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1988))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1987))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1986))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1985))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1984))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1983))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1982))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1981))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1980))).
% 297.87/296.12  all VarCurr (v4386(VarCurr)<->v4387(VarCurr)&v2585(VarCurr)).
% 297.87/296.12  all VarCurr (-v4387(VarCurr)<->v2547(VarCurr)).
% 297.87/296.12  all VarNext (v803(VarNext,bitIndex3)<->v4377(VarNext,bitIndex3)).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4379(VarNext)-> (all B (range_63_0(B)-> (v4377(VarNext,B)<->v803(VarCurr,B)))))).
% 297.87/296.12  all VarNext (v4379(VarNext)-> (all B (range_63_0(B)-> (v4377(VarNext,B)<->v2377(VarNext,B))))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4379(VarNext)<->v4380(VarNext)&v2370(VarNext))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4380(VarNext)<->v4382(VarNext)&v2343(VarNext))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4382(VarNext)<->v2350(VarNext))).
% 297.87/296.12  all VarNext (v959(VarNext,bitIndex3)<->v4369(VarNext,bitIndex3)).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4370(VarNext)-> (all B (range_63_0(B)-> (v4369(VarNext,B)<->v959(VarCurr,B)))))).
% 297.87/296.12  all VarNext (v4370(VarNext)-> (all B (range_63_0(B)-> (v4369(VarNext,B)<->v2356(VarNext,B))))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4370(VarNext)<->v4371(VarNext))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4371(VarNext)<->v4373(VarNext)&v2343(VarNext))).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4373(VarNext)<->v2350(VarNext))).
% 297.87/296.12  all VarCurr (v961(VarCurr,bitIndex3)<->v2339(VarCurr,bitIndex3)).
% 297.87/296.12  all VarCurr (-v4367(VarCurr)-> (v2327(VarCurr,bitIndex3)<->$F)).
% 297.87/296.12  all VarCurr (v4367(VarCurr)-> (v2327(VarCurr,bitIndex3)<->v2329(VarCurr))).
% 297.87/296.12  all VarCurr (v4367(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.87/296.12  all VarCurr (v963(VarCurr,bitIndex3)<->v2325(VarCurr,bitIndex3)).
% 297.87/296.12  all VarCurr (-v4365(VarCurr)-> (v965(VarCurr,bitIndex3)<->$F)).
% 297.87/296.12  all VarCurr (v4365(VarCurr)-> (v965(VarCurr,bitIndex3)<->v2201(VarCurr))).
% 297.87/296.12  all VarCurr (v4365(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.87/296.12  all VarCurr (v797(VarCurr,bitIndex4)<->v799(VarCurr,bitIndex4)).
% 297.87/296.12  all VarCurr (v799(VarCurr,bitIndex4)<->v2593(VarCurr,bitIndex4)).
% 297.87/296.12  all VarCurr (v801(VarCurr,bitIndex4)<->v2592(VarCurr,bitIndex4)).
% 297.87/296.12  all VarCurr (v2381(VarCurr,bitIndex4)<->v2383(VarCurr,bitIndex4)).
% 297.87/296.12  all VarCurr (v2383(VarCurr,bitIndex4)<->v2385(VarCurr,bitIndex4)).
% 297.87/296.12  all VarCurr (v2385(VarCurr,bitIndex4)<->v2387(VarCurr,bitIndex4)).
% 297.87/296.12  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4356(VarNext)-> (v2387(VarNext,bitIndex4)<->v2387(VarCurr,bitIndex4)))).
% 297.87/296.12  all VarNext (v4356(VarNext)-> (v2387(VarNext,bitIndex4)<->v4358(VarNext))).
% 297.87/296.12  all VarCurr (v4358(VarCurr)<->v2389(VarCurr)&v4359(VarCurr)).
% 297.87/296.12  all VarCurr (v4359(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1979))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1978))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1977))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1976))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1975))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1974))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1973))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1972))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1971))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1970))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1969))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1968))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1967))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1966))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1965))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1964))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1963))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1962))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1961))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1960))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1959))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1958))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1957))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1956))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1955))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1954))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1953))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1952))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1951))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1950))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1949))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1948))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1947))).
% 297.87/296.13  all VarCurr (v4356(VarCurr)<->v4357(VarCurr)&v2585(VarCurr)).
% 297.87/296.13  all VarCurr (-v4357(VarCurr)<->v2547(VarCurr)).
% 297.87/296.13  all VarNext (v803(VarNext,bitIndex4)<->v4347(VarNext,bitIndex4)).
% 297.87/296.13  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4349(VarNext)-> (all B (range_63_0(B)-> (v4347(VarNext,B)<->v803(VarCurr,B)))))).
% 297.87/296.13  all VarNext (v4349(VarNext)-> (all B (range_63_0(B)-> (v4347(VarNext,B)<->v2377(VarNext,B))))).
% 297.87/296.13  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4349(VarNext)<->v4350(VarNext)&v2370(VarNext))).
% 297.87/296.13  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4350(VarNext)<->v4352(VarNext)&v2343(VarNext))).
% 297.87/296.13  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4352(VarNext)<->v2350(VarNext))).
% 297.87/296.13  all VarNext (v959(VarNext,bitIndex4)<->v4339(VarNext,bitIndex4)).
% 297.87/296.13  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4340(VarNext)-> (all B (range_63_0(B)-> (v4339(VarNext,B)<->v959(VarCurr,B)))))).
% 297.87/296.13  all VarNext (v4340(VarNext)-> (all B (range_63_0(B)-> (v4339(VarNext,B)<->v2356(VarNext,B))))).
% 297.87/296.13  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4340(VarNext)<->v4341(VarNext))).
% 297.87/296.13  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4341(VarNext)<->v4343(VarNext)&v2343(VarNext))).
% 297.87/296.13  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4343(VarNext)<->v2350(VarNext))).
% 297.87/296.13  all VarCurr (v961(VarCurr,bitIndex4)<->v2339(VarCurr,bitIndex4)).
% 297.87/296.13  all VarCurr (-v4337(VarCurr)-> (v2327(VarCurr,bitIndex4)<->$F)).
% 297.87/296.13  all VarCurr (v4337(VarCurr)-> (v2327(VarCurr,bitIndex4)<->v2329(VarCurr))).
% 297.87/296.13  all VarCurr (v4337(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.87/296.13  all VarCurr (v963(VarCurr,bitIndex4)<->v2325(VarCurr,bitIndex4)).
% 297.87/296.13  all VarCurr (-v4335(VarCurr)-> (v965(VarCurr,bitIndex4)<->$F)).
% 297.87/296.13  all VarCurr (v4335(VarCurr)-> (v965(VarCurr,bitIndex4)<->v2201(VarCurr))).
% 297.87/296.13  all VarCurr (v4335(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.87/296.13  all VarCurr (v797(VarCurr,bitIndex5)<->v799(VarCurr,bitIndex5)).
% 297.87/296.13  all VarCurr (v799(VarCurr,bitIndex5)<->v2593(VarCurr,bitIndex5)).
% 297.87/296.13  all VarCurr (v801(VarCurr,bitIndex5)<->v2592(VarCurr,bitIndex5)).
% 297.87/296.13  all VarCurr (v2381(VarCurr,bitIndex5)<->v2383(VarCurr,bitIndex5)).
% 297.87/296.13  all VarCurr (v2383(VarCurr,bitIndex5)<->v2385(VarCurr,bitIndex5)).
% 297.87/296.14  all VarCurr (v2385(VarCurr,bitIndex5)<->v2387(VarCurr,bitIndex5)).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4326(VarNext)-> (v2387(VarNext,bitIndex5)<->v2387(VarCurr,bitIndex5)))).
% 297.87/296.14  all VarNext (v4326(VarNext)-> (v2387(VarNext,bitIndex5)<->v4328(VarNext))).
% 297.87/296.14  all VarCurr (v4328(VarCurr)<->v2389(VarCurr)&v4329(VarCurr)).
% 297.87/296.14  all VarCurr (v4329(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1946))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1945))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1944))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1943))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1942))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1941))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1940))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1939))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1938))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1937))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1936))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1935))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1934))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1933))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1932))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1931))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1930))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1929))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1928))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1927))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1926))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1925))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1924))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1923))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1922))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1921))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1920))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1919))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1918))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1917))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1916))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1915))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1914))).
% 297.87/296.14  all VarCurr (v4326(VarCurr)<->v4327(VarCurr)&v2585(VarCurr)).
% 297.87/296.14  all VarCurr (-v4327(VarCurr)<->v2547(VarCurr)).
% 297.87/296.14  all VarNext (v803(VarNext,bitIndex5)<->v4317(VarNext,bitIndex5)).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4319(VarNext)-> (all B (range_63_0(B)-> (v4317(VarNext,B)<->v803(VarCurr,B)))))).
% 297.87/296.14  all VarNext (v4319(VarNext)-> (all B (range_63_0(B)-> (v4317(VarNext,B)<->v2377(VarNext,B))))).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4319(VarNext)<->v4320(VarNext)&v2370(VarNext))).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4320(VarNext)<->v4322(VarNext)&v2343(VarNext))).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4322(VarNext)<->v2350(VarNext))).
% 297.87/296.14  all VarNext (v959(VarNext,bitIndex5)<->v4309(VarNext,bitIndex5)).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4310(VarNext)-> (all B (range_63_0(B)-> (v4309(VarNext,B)<->v959(VarCurr,B)))))).
% 297.87/296.14  all VarNext (v4310(VarNext)-> (all B (range_63_0(B)-> (v4309(VarNext,B)<->v2356(VarNext,B))))).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4310(VarNext)<->v4311(VarNext))).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4311(VarNext)<->v4313(VarNext)&v2343(VarNext))).
% 297.87/296.14  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4313(VarNext)<->v2350(VarNext))).
% 297.87/296.14  all VarCurr (v961(VarCurr,bitIndex5)<->v2339(VarCurr,bitIndex5)).
% 297.87/296.14  all VarCurr (-v4307(VarCurr)-> (v2327(VarCurr,bitIndex5)<->$F)).
% 297.87/296.14  all VarCurr (v4307(VarCurr)-> (v2327(VarCurr,bitIndex5)<->v2329(VarCurr))).
% 297.87/296.14  all VarCurr (v4307(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.87/296.14  all VarCurr (v963(VarCurr,bitIndex5)<->v2325(VarCurr,bitIndex5)).
% 297.87/296.14  all VarCurr (-v4305(VarCurr)-> (v965(VarCurr,bitIndex5)<->$F)).
% 297.95/296.15  all VarCurr (v4305(VarCurr)-> (v965(VarCurr,bitIndex5)<->v2201(VarCurr))).
% 297.95/296.15  all VarCurr (v4305(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.95/296.15  all VarCurr (v797(VarCurr,bitIndex6)<->v799(VarCurr,bitIndex6)).
% 297.95/296.15  all VarCurr (v799(VarCurr,bitIndex6)<->v2593(VarCurr,bitIndex6)).
% 297.95/296.15  all VarCurr (v801(VarCurr,bitIndex6)<->v2592(VarCurr,bitIndex6)).
% 297.95/296.15  all VarCurr (v2381(VarCurr,bitIndex6)<->v2383(VarCurr,bitIndex6)).
% 297.95/296.15  all VarCurr (v2383(VarCurr,bitIndex6)<->v2385(VarCurr,bitIndex6)).
% 297.95/296.15  all VarCurr (v2385(VarCurr,bitIndex6)<->v2387(VarCurr,bitIndex6)).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4296(VarNext)-> (v2387(VarNext,bitIndex6)<->v2387(VarCurr,bitIndex6)))).
% 297.95/296.15  all VarNext (v4296(VarNext)-> (v2387(VarNext,bitIndex6)<->v4298(VarNext))).
% 297.95/296.15  all VarCurr (v4298(VarCurr)<->v2389(VarCurr)&v4299(VarCurr)).
% 297.95/296.15  all VarCurr (v4299(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1913))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1912))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1911))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1910))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1909))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1908))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1907))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1906))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1905))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1904))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1903))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1902))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1901))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1900))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1899))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1898))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1897))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1896))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1895))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1894))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1893))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1892))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1891))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1890))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1889))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1888))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1887))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1886))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1885))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1884))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1883))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1882))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1881))).
% 297.95/296.15  all VarCurr (v4296(VarCurr)<->v4297(VarCurr)&v2585(VarCurr)).
% 297.95/296.15  all VarCurr (-v4297(VarCurr)<->v2547(VarCurr)).
% 297.95/296.15  all VarNext (v803(VarNext,bitIndex6)<->v4287(VarNext,bitIndex6)).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4289(VarNext)-> (all B (range_63_0(B)-> (v4287(VarNext,B)<->v803(VarCurr,B)))))).
% 297.95/296.15  all VarNext (v4289(VarNext)-> (all B (range_63_0(B)-> (v4287(VarNext,B)<->v2377(VarNext,B))))).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4289(VarNext)<->v4290(VarNext)&v2370(VarNext))).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4290(VarNext)<->v4292(VarNext)&v2343(VarNext))).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4292(VarNext)<->v2350(VarNext))).
% 297.95/296.15  all VarNext (v959(VarNext,bitIndex6)<->v4279(VarNext,bitIndex6)).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4280(VarNext)-> (all B (range_63_0(B)-> (v4279(VarNext,B)<->v959(VarCurr,B)))))).
% 297.95/296.15  all VarNext (v4280(VarNext)-> (all B (range_63_0(B)-> (v4279(VarNext,B)<->v2356(VarNext,B))))).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4280(VarNext)<->v4281(VarNext))).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4281(VarNext)<->v4283(VarNext)&v2343(VarNext))).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4283(VarNext)<->v2350(VarNext))).
% 297.95/296.15  all VarCurr (v961(VarCurr,bitIndex6)<->v2339(VarCurr,bitIndex6)).
% 297.95/296.15  all VarCurr (-v4277(VarCurr)-> (v2327(VarCurr,bitIndex6)<->$F)).
% 297.95/296.15  all VarCurr (v4277(VarCurr)-> (v2327(VarCurr,bitIndex6)<->v2329(VarCurr))).
% 297.95/296.15  all VarCurr (v4277(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.95/296.15  all VarCurr (v963(VarCurr,bitIndex6)<->v2325(VarCurr,bitIndex6)).
% 297.95/296.15  all VarCurr (-v4275(VarCurr)-> (v965(VarCurr,bitIndex6)<->$F)).
% 297.95/296.15  all VarCurr (v4275(VarCurr)-> (v965(VarCurr,bitIndex6)<->v2201(VarCurr))).
% 297.95/296.15  all VarCurr (v4275(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.95/296.15  all VarCurr (v797(VarCurr,bitIndex7)<->v799(VarCurr,bitIndex7)).
% 297.95/296.15  all VarCurr (v799(VarCurr,bitIndex7)<->v2593(VarCurr,bitIndex7)).
% 297.95/296.15  all VarCurr (v801(VarCurr,bitIndex7)<->v2592(VarCurr,bitIndex7)).
% 297.95/296.15  all VarCurr (v2381(VarCurr,bitIndex7)<->v2383(VarCurr,bitIndex7)).
% 297.95/296.15  all VarCurr (v2383(VarCurr,bitIndex7)<->v2385(VarCurr,bitIndex7)).
% 297.95/296.15  all VarCurr (v2385(VarCurr,bitIndex7)<->v2387(VarCurr,bitIndex7)).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4266(VarNext)-> (v2387(VarNext,bitIndex7)<->v2387(VarCurr,bitIndex7)))).
% 297.95/296.15  all VarNext (v4266(VarNext)-> (v2387(VarNext,bitIndex7)<->v4268(VarNext))).
% 297.95/296.15  all VarCurr (v4268(VarCurr)<->v2389(VarCurr)&v4269(VarCurr)).
% 297.95/296.15  all VarCurr (v4269(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1880))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1879))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1878))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1877))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1876))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1875))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1874))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1873))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1872))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1871))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1870))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1869))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1868))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1867))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1866))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1865))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1864))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1863))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1862))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1861))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1860))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1859))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1858))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1857))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1856))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1855))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1854))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1853))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1852))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1851))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1850))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1849))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1848))).
% 297.95/296.15  all VarCurr (v4266(VarCurr)<->v4267(VarCurr)&v2585(VarCurr)).
% 297.95/296.15  all VarCurr (-v4267(VarCurr)<->v2547(VarCurr)).
% 297.95/296.15  all VarNext (v803(VarNext,bitIndex7)<->v4257(VarNext,bitIndex7)).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4259(VarNext)-> (all B (range_63_0(B)-> (v4257(VarNext,B)<->v803(VarCurr,B)))))).
% 297.95/296.15  all VarNext (v4259(VarNext)-> (all B (range_63_0(B)-> (v4257(VarNext,B)<->v2377(VarNext,B))))).
% 297.95/296.15  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4259(VarNext)<->v4260(VarNext)&v2370(VarNext))).
% 297.95/296.16  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4260(VarNext)<->v4262(VarNext)&v2343(VarNext))).
% 297.95/296.16  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4262(VarNext)<->v2350(VarNext))).
% 297.95/296.16  all VarNext (v959(VarNext,bitIndex7)<->v4249(VarNext,bitIndex7)).
% 297.95/296.16  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4250(VarNext)-> (all B (range_63_0(B)-> (v4249(VarNext,B)<->v959(VarCurr,B)))))).
% 297.95/296.16  all VarNext (v4250(VarNext)-> (all B (range_63_0(B)-> (v4249(VarNext,B)<->v2356(VarNext,B))))).
% 297.95/296.16  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4250(VarNext)<->v4251(VarNext))).
% 297.95/296.16  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4251(VarNext)<->v4253(VarNext)&v2343(VarNext))).
% 297.95/296.16  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4253(VarNext)<->v2350(VarNext))).
% 297.95/296.16  all VarCurr (v961(VarCurr,bitIndex7)<->v2339(VarCurr,bitIndex7)).
% 297.95/296.16  all VarCurr (-v4247(VarCurr)-> (v2327(VarCurr,bitIndex7)<->$F)).
% 297.95/296.16  all VarCurr (v4247(VarCurr)-> (v2327(VarCurr,bitIndex7)<->v2329(VarCurr))).
% 297.95/296.16  all VarCurr (v4247(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.95/296.16  all VarCurr (v963(VarCurr,bitIndex7)<->v2325(VarCurr,bitIndex7)).
% 297.95/296.16  all VarCurr (-v4245(VarCurr)-> (v965(VarCurr,bitIndex7)<->$F)).
% 297.95/296.16  all VarCurr (v4245(VarCurr)-> (v965(VarCurr,bitIndex7)<->v2201(VarCurr))).
% 297.95/296.16  all VarCurr (v4245(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.95/296.16  all VarCurr (v797(VarCurr,bitIndex8)<->v799(VarCurr,bitIndex8)).
% 297.95/296.16  all VarCurr (v799(VarCurr,bitIndex8)<->v2593(VarCurr,bitIndex8)).
% 297.95/296.16  all VarCurr (v801(VarCurr,bitIndex8)<->v2592(VarCurr,bitIndex8)).
% 297.95/296.16  all VarCurr (v2381(VarCurr,bitIndex8)<->v2383(VarCurr,bitIndex8)).
% 297.95/296.16  all VarCurr (v2383(VarCurr,bitIndex8)<->v2385(VarCurr,bitIndex8)).
% 297.95/296.16  all VarCurr (v2385(VarCurr,bitIndex8)<->v2387(VarCurr,bitIndex8)).
% 297.95/296.16  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4236(VarNext)-> (v2387(VarNext,bitIndex8)<->v2387(VarCurr,bitIndex8)))).
% 297.95/296.16  all VarNext (v4236(VarNext)-> (v2387(VarNext,bitIndex8)<->v4238(VarNext))).
% 297.95/296.16  all VarCurr (v4238(VarCurr)<->v2389(VarCurr)&v4239(VarCurr)).
% 297.95/296.16  all VarCurr (v4239(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1847))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1846))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1845))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1844))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1843))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1842))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1841))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1840))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1839))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1838))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1837))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1836))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1835))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1834))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1833))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1832))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1831))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1830))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1829))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1828))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1827))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1826))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1825))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1824))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1823))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1822))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1821))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1820))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1819))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1818))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1817))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1816))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1815))).
% 297.95/296.17  all VarCurr (v4236(VarCurr)<->v4237(VarCurr)&v2585(VarCurr)).
% 297.95/296.17  all VarCurr (-v4237(VarCurr)<->v2547(VarCurr)).
% 297.95/296.17  all VarNext (v803(VarNext,bitIndex8)<->v4227(VarNext,bitIndex8)).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4229(VarNext)-> (all B (range_63_0(B)-> (v4227(VarNext,B)<->v803(VarCurr,B)))))).
% 297.95/296.17  all VarNext (v4229(VarNext)-> (all B (range_63_0(B)-> (v4227(VarNext,B)<->v2377(VarNext,B))))).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4229(VarNext)<->v4230(VarNext)&v2370(VarNext))).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4230(VarNext)<->v4232(VarNext)&v2343(VarNext))).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4232(VarNext)<->v2350(VarNext))).
% 297.95/296.17  all VarNext (v959(VarNext,bitIndex8)<->v4219(VarNext,bitIndex8)).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4220(VarNext)-> (all B (range_63_0(B)-> (v4219(VarNext,B)<->v959(VarCurr,B)))))).
% 297.95/296.17  all VarNext (v4220(VarNext)-> (all B (range_63_0(B)-> (v4219(VarNext,B)<->v2356(VarNext,B))))).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4220(VarNext)<->v4221(VarNext))).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4221(VarNext)<->v4223(VarNext)&v2343(VarNext))).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4223(VarNext)<->v2350(VarNext))).
% 297.95/296.17  all VarCurr (v961(VarCurr,bitIndex8)<->v2339(VarCurr,bitIndex8)).
% 297.95/296.17  all VarCurr (-v4217(VarCurr)-> (v2327(VarCurr,bitIndex8)<->$F)).
% 297.95/296.17  all VarCurr (v4217(VarCurr)-> (v2327(VarCurr,bitIndex8)<->v2329(VarCurr))).
% 297.95/296.17  all VarCurr (v4217(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.95/296.17  all VarCurr (v963(VarCurr,bitIndex8)<->v2325(VarCurr,bitIndex8)).
% 297.95/296.17  all VarCurr (-v4215(VarCurr)-> (v965(VarCurr,bitIndex8)<->$F)).
% 297.95/296.17  all VarCurr (v4215(VarCurr)-> (v965(VarCurr,bitIndex8)<->v2201(VarCurr))).
% 297.95/296.17  all VarCurr (v4215(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.95/296.17  all VarCurr (v797(VarCurr,bitIndex9)<->v799(VarCurr,bitIndex9)).
% 297.95/296.17  all VarCurr (v799(VarCurr,bitIndex9)<->v2593(VarCurr,bitIndex9)).
% 297.95/296.17  all VarCurr (v801(VarCurr,bitIndex9)<->v2592(VarCurr,bitIndex9)).
% 297.95/296.17  all VarCurr (v2381(VarCurr,bitIndex9)<->v2383(VarCurr,bitIndex9)).
% 297.95/296.17  all VarCurr (v2383(VarCurr,bitIndex9)<->v2385(VarCurr,bitIndex9)).
% 297.95/296.17  all VarCurr (v2385(VarCurr,bitIndex9)<->v2387(VarCurr,bitIndex9)).
% 297.95/296.17  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4206(VarNext)-> (v2387(VarNext,bitIndex9)<->v2387(VarCurr,bitIndex9)))).
% 297.95/296.17  all VarNext (v4206(VarNext)-> (v2387(VarNext,bitIndex9)<->v4208(VarNext))).
% 297.95/296.17  all VarCurr (v4208(VarCurr)<->v2389(VarCurr)&v4209(VarCurr)).
% 297.95/296.17  all VarCurr (v4209(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1814))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1813))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1812))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1811))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1810))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1809))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1808))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1807))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1806))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1805))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1804))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1803))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1802))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1801))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1800))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1799))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1798))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1797))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1796))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1795))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1794))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1793))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1792))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1791))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1790))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1789))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1788))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1787))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1786))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1785))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1784))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1783))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1782))).
% 297.95/296.18  all VarCurr (v4206(VarCurr)<->v4207(VarCurr)&v2585(VarCurr)).
% 297.95/296.18  all VarCurr (-v4207(VarCurr)<->v2547(VarCurr)).
% 297.95/296.18  all VarNext (v803(VarNext,bitIndex9)<->v4197(VarNext,bitIndex9)).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4199(VarNext)-> (all B (range_63_0(B)-> (v4197(VarNext,B)<->v803(VarCurr,B)))))).
% 297.95/296.18  all VarNext (v4199(VarNext)-> (all B (range_63_0(B)-> (v4197(VarNext,B)<->v2377(VarNext,B))))).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4199(VarNext)<->v4200(VarNext)&v2370(VarNext))).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4200(VarNext)<->v4202(VarNext)&v2343(VarNext))).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4202(VarNext)<->v2350(VarNext))).
% 297.95/296.18  all VarNext (v959(VarNext,bitIndex9)<->v4189(VarNext,bitIndex9)).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4190(VarNext)-> (all B (range_63_0(B)-> (v4189(VarNext,B)<->v959(VarCurr,B)))))).
% 297.95/296.18  all VarNext (v4190(VarNext)-> (all B (range_63_0(B)-> (v4189(VarNext,B)<->v2356(VarNext,B))))).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4190(VarNext)<->v4191(VarNext))).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4191(VarNext)<->v4193(VarNext)&v2343(VarNext))).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4193(VarNext)<->v2350(VarNext))).
% 297.95/296.18  all VarCurr (v961(VarCurr,bitIndex9)<->v2339(VarCurr,bitIndex9)).
% 297.95/296.18  all VarCurr (-v4187(VarCurr)-> (v2327(VarCurr,bitIndex9)<->$F)).
% 297.95/296.18  all VarCurr (v4187(VarCurr)-> (v2327(VarCurr,bitIndex9)<->v2329(VarCurr))).
% 297.95/296.18  all VarCurr (v4187(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.95/296.18  all VarCurr (v963(VarCurr,bitIndex9)<->v2325(VarCurr,bitIndex9)).
% 297.95/296.18  all VarCurr (-v4185(VarCurr)-> (v965(VarCurr,bitIndex9)<->$F)).
% 297.95/296.18  all VarCurr (v4185(VarCurr)-> (v965(VarCurr,bitIndex9)<->v2201(VarCurr))).
% 297.95/296.18  all VarCurr (v4185(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
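%----The axioms above (and each block that follows for higher bit indices)
%----instantiate one per-bit template: bit k of v2387 is a guarded flip-flop
%----that holds its value across nextState unless an enable fires, in which
%----case it latches v2389 AND a 33-bit equality test between v2409[32:0] and
%----a window of v2545. Below is a minimal Python sketch of that template; the
%----function and argument names are invented for illustration and are not
%----part of the solver output.

def step_v2387_bit(prev_bit, load, v2389, v2409, v2545_window):
    """One nextState transition for a single bit of v2387 (sketch).

    prev_bit     -- current value of v2387 at this bit index
    load         -- the enable (in the axioms: a conjunction such as ~v2547 & v2585)
    v2389        -- scalar control signal
    v2409        -- 33 booleans, indices 0..32
    v2545_window -- the matching 33-boolean slice of v2545 for this bit
    """
    compare = all(a == b for a, b in zip(v2409, v2545_window))  # e.g. v4209, v4179, ...
    data = v2389 and compare                                    # e.g. v4208, v4178, ...
    return data if load else prev_bit                           # hold when not enabled

# Tiny usage check with made-up values:
w = [True] * 33
assert step_v2387_bit(False, True, True, w, w) is True    # enabled: latch the compare
assert step_v2387_bit(False, False, True, w, w) is False  # disabled: hold old value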
% 297.95/296.18  all VarCurr (v797(VarCurr,bitIndex10)<->v799(VarCurr,bitIndex10)).
% 297.95/296.18  all VarCurr (v799(VarCurr,bitIndex10)<->v2593(VarCurr,bitIndex10)).
% 297.95/296.18  all VarCurr (v801(VarCurr,bitIndex10)<->v2592(VarCurr,bitIndex10)).
% 297.95/296.18  all VarCurr (v2381(VarCurr,bitIndex10)<->v2383(VarCurr,bitIndex10)).
% 297.95/296.18  all VarCurr (v2383(VarCurr,bitIndex10)<->v2385(VarCurr,bitIndex10)).
% 297.95/296.18  all VarCurr (v2385(VarCurr,bitIndex10)<->v2387(VarCurr,bitIndex10)).
% 297.95/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4176(VarNext)-> (v2387(VarNext,bitIndex10)<->v2387(VarCurr,bitIndex10)))).
% 297.95/296.18  all VarNext (v4176(VarNext)-> (v2387(VarNext,bitIndex10)<->v4178(VarNext))).
% 297.95/296.18  all VarCurr (v4178(VarCurr)<->v2389(VarCurr)&v4179(VarCurr)).
% 297.95/296.18  all VarCurr (v4179(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1781))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1780))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1779))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1778))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1777))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1776))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1775))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1774))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1773))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1772))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1771))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1770))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1769))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1768))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1767))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1766))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1765))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1764))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1763))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1762))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1761))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1760))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1759))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1758))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1757))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1756))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1755))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1754))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1753))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1752))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1751))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1750))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1749))).
% 297.99/296.18  all VarCurr (v4176(VarCurr)<->v4177(VarCurr)&v2585(VarCurr)).
% 297.99/296.18  all VarCurr (-v4177(VarCurr)<->v2547(VarCurr)).
% 297.99/296.18  all VarNext (v803(VarNext,bitIndex10)<->v4167(VarNext,bitIndex10)).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4169(VarNext)-> (all B (range_63_0(B)-> (v4167(VarNext,B)<->v803(VarCurr,B)))))).
% 297.99/296.18  all VarNext (v4169(VarNext)-> (all B (range_63_0(B)-> (v4167(VarNext,B)<->v2377(VarNext,B))))).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4169(VarNext)<->v4170(VarNext)&v2370(VarNext))).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4170(VarNext)<->v4172(VarNext)&v2343(VarNext))).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4172(VarNext)<->v2350(VarNext))).
% 297.99/296.18  all VarNext (v959(VarNext,bitIndex10)<->v4159(VarNext,bitIndex10)).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4160(VarNext)-> (all B (range_63_0(B)-> (v4159(VarNext,B)<->v959(VarCurr,B)))))).
% 297.99/296.18  all VarNext (v4160(VarNext)-> (all B (range_63_0(B)-> (v4159(VarNext,B)<->v2356(VarNext,B))))).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4160(VarNext)<->v4161(VarNext))).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4161(VarNext)<->v4163(VarNext)&v2343(VarNext))).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4163(VarNext)<->v2350(VarNext))).
% 297.99/296.18  all VarCurr (v961(VarCurr,bitIndex10)<->v2339(VarCurr,bitIndex10)).
% 297.99/296.18  all VarCurr (-v4157(VarCurr)-> (v2327(VarCurr,bitIndex10)<->$F)).
% 297.99/296.18  all VarCurr (v4157(VarCurr)-> (v2327(VarCurr,bitIndex10)<->v2329(VarCurr))).
% 297.99/296.18  all VarCurr (v4157(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.99/296.18  all VarCurr (v963(VarCurr,bitIndex10)<->v2325(VarCurr,bitIndex10)).
% 297.99/296.18  all VarCurr (-v4155(VarCurr)-> (v965(VarCurr,bitIndex10)<->$F)).
% 297.99/296.18  all VarCurr (v4155(VarCurr)-> (v965(VarCurr,bitIndex10)<->v2201(VarCurr))).
% 297.99/296.18  all VarCurr (v4155(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
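%----A hedged observation on the selector v967: in each decode above, the 6-bit
%----constant required of v967 is the binary encoding of the bit index being
%----driven (001001 = 9 for v4187/v4185, 001010 = 10 for v4157/v4155). A small
%----Python check, with an invented helper name:

def decode(bits):
    # bits listed from index 5 down to index 0, in the order the axioms write them
    return sum(b << i for i, b in zip(range(5, -1, -1), bits))

assert decode([0, 0, 1, 0, 0, 1]) == 9   # selector for bit 9  (v4187/v4185)
assert decode([0, 0, 1, 0, 1, 0]) == 10  # selector for bit 10 (v4157/v4155)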
% 297.99/296.18  all VarCurr (v797(VarCurr,bitIndex11)<->v799(VarCurr,bitIndex11)).
% 297.99/296.18  all VarCurr (v799(VarCurr,bitIndex11)<->v2593(VarCurr,bitIndex11)).
% 297.99/296.18  all VarCurr (v801(VarCurr,bitIndex11)<->v2592(VarCurr,bitIndex11)).
% 297.99/296.18  all VarCurr (v2381(VarCurr,bitIndex11)<->v2383(VarCurr,bitIndex11)).
% 297.99/296.18  all VarCurr (v2383(VarCurr,bitIndex11)<->v2385(VarCurr,bitIndex11)).
% 297.99/296.18  all VarCurr (v2385(VarCurr,bitIndex11)<->v2387(VarCurr,bitIndex11)).
% 297.99/296.18  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4146(VarNext)-> (v2387(VarNext,bitIndex11)<->v2387(VarCurr,bitIndex11)))).
% 297.99/296.19  all VarNext (v4146(VarNext)-> (v2387(VarNext,bitIndex11)<->v4148(VarNext))).
% 297.99/296.19  all VarCurr (v4148(VarCurr)<->v2389(VarCurr)&v4149(VarCurr)).
% 297.99/296.19  all VarCurr (v4149(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1748))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1747))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1746))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1745))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1744))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1743))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1742))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1741))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1740))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1739))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1738))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1737))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1736))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1735))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1734))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1733))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1732))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1731))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1730))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1729))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1728))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1727))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1726))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1725))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1724))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1723))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1722))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1721))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1720))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1719))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1718))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1717))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1716))).
% 297.99/296.19  all VarCurr (v4146(VarCurr)<->v4147(VarCurr)&v2585(VarCurr)).
% 297.99/296.19  all VarCurr (-v4147(VarCurr)<->v2547(VarCurr)).
% 297.99/296.19  all VarNext (v803(VarNext,bitIndex11)<->v4137(VarNext,bitIndex11)).
% 297.99/296.19  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4139(VarNext)-> (all B (range_63_0(B)-> (v4137(VarNext,B)<->v803(VarCurr,B)))))).
% 297.99/296.19  all VarNext (v4139(VarNext)-> (all B (range_63_0(B)-> (v4137(VarNext,B)<->v2377(VarNext,B))))).
% 297.99/296.19  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4139(VarNext)<->v4140(VarNext)&v2370(VarNext))).
% 297.99/296.19  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4140(VarNext)<->v4142(VarNext)&v2343(VarNext))).
% 297.99/296.19  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4142(VarNext)<->v2350(VarNext))).
% 297.99/296.19  all VarNext (v959(VarNext,bitIndex11)<->v4129(VarNext,bitIndex11)).
% 297.99/296.19  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4130(VarNext)-> (all B (range_63_0(B)-> (v4129(VarNext,B)<->v959(VarCurr,B)))))).
% 297.99/296.19  all VarNext (v4130(VarNext)-> (all B (range_63_0(B)-> (v4129(VarNext,B)<->v2356(VarNext,B))))).
% 297.99/296.19  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4130(VarNext)<->v4131(VarNext))).
% 297.99/296.19  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4131(VarNext)<->v4133(VarNext)&v2343(VarNext))).
% 297.99/296.19  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4133(VarNext)<->v2350(VarNext))).
% 297.99/296.19  all VarCurr (v961(VarCurr,bitIndex11)<->v2339(VarCurr,bitIndex11)).
% 297.99/296.19  all VarCurr (-v4127(VarCurr)-> (v2327(VarCurr,bitIndex11)<->$F)).
% 297.99/296.19  all VarCurr (v4127(VarCurr)-> (v2327(VarCurr,bitIndex11)<->v2329(VarCurr))).
% 297.99/296.19  all VarCurr (v4127(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.99/296.19  all VarCurr (v963(VarCurr,bitIndex11)<->v2325(VarCurr,bitIndex11)).
% 297.99/296.19  all VarCurr (-v4125(VarCurr)-> (v965(VarCurr,bitIndex11)<->$F)).
% 297.99/296.19  all VarCurr (v4125(VarCurr)-> (v965(VarCurr,bitIndex11)<->v2201(VarCurr))).
% 297.99/296.20  all VarCurr (v4125(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
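%----The comparator windows into v2545 slide down by 33 positions per bit index:
%----bit 9 uses indices 1782..1814, bit 10 uses 1749..1781, bit 11 uses
%----1716..1748. A plausible closed form, checked against the instances in this
%----listing (helper name invented for illustration):

def window(k):
    low = 1782 - 33 * (k - 9)
    return low, low + 32

assert window(9) == (1782, 1814)
assert window(10) == (1749, 1781)
assert window(11) == (1716, 1748)
assert window(21) == (1386, 1418)  # matches the bit-21 block later in the listing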
% 297.99/296.20  all VarCurr (v797(VarCurr,bitIndex12)<->v799(VarCurr,bitIndex12)).
% 297.99/296.20  all VarCurr (v799(VarCurr,bitIndex12)<->v2593(VarCurr,bitIndex12)).
% 297.99/296.20  all VarCurr (v801(VarCurr,bitIndex12)<->v2592(VarCurr,bitIndex12)).
% 297.99/296.20  all VarCurr (v2381(VarCurr,bitIndex12)<->v2383(VarCurr,bitIndex12)).
% 297.99/296.20  all VarCurr (v2383(VarCurr,bitIndex12)<->v2385(VarCurr,bitIndex12)).
% 297.99/296.20  all VarCurr (v2385(VarCurr,bitIndex12)<->v2387(VarCurr,bitIndex12)).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4116(VarNext)-> (v2387(VarNext,bitIndex12)<->v2387(VarCurr,bitIndex12)))).
% 297.99/296.20  all VarNext (v4116(VarNext)-> (v2387(VarNext,bitIndex12)<->v4118(VarNext))).
% 297.99/296.20  all VarCurr (v4118(VarCurr)<->v2389(VarCurr)&v4119(VarCurr)).
% 297.99/296.20  all VarCurr (v4119(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1715))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1714))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1713))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1712))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1711))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1710))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1709))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1708))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1707))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1706))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1705))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1704))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1703))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1702))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1701))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1700))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1699))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1698))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1697))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1696))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1695))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1694))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1693))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1692))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1691))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1690))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1689))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1688))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1687))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1686))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1685))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1684))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1683))).
% 297.99/296.20  all VarCurr (v4116(VarCurr)<->v4117(VarCurr)&v2585(VarCurr)).
% 297.99/296.20  all VarCurr (-v4117(VarCurr)<->v2547(VarCurr)).
% 297.99/296.20  all VarNext (v803(VarNext,bitIndex12)<->v4107(VarNext,bitIndex12)).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4109(VarNext)-> (all B (range_63_0(B)-> (v4107(VarNext,B)<->v803(VarCurr,B)))))).
% 297.99/296.20  all VarNext (v4109(VarNext)-> (all B (range_63_0(B)-> (v4107(VarNext,B)<->v2377(VarNext,B))))).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4109(VarNext)<->v4110(VarNext)&v2370(VarNext))).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4110(VarNext)<->v4112(VarNext)&v2343(VarNext))).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4112(VarNext)<->v2350(VarNext))).
% 297.99/296.20  all VarNext (v959(VarNext,bitIndex12)<->v4099(VarNext,bitIndex12)).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4100(VarNext)-> (all B (range_63_0(B)-> (v4099(VarNext,B)<->v959(VarCurr,B)))))).
% 297.99/296.20  all VarNext (v4100(VarNext)-> (all B (range_63_0(B)-> (v4099(VarNext,B)<->v2356(VarNext,B))))).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4100(VarNext)<->v4101(VarNext))).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4101(VarNext)<->v4103(VarNext)&v2343(VarNext))).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4103(VarNext)<->v2350(VarNext))).
% 297.99/296.20  all VarCurr (v961(VarCurr,bitIndex12)<->v2339(VarCurr,bitIndex12)).
% 297.99/296.20  all VarCurr (-v4097(VarCurr)-> (v2327(VarCurr,bitIndex12)<->$F)).
% 297.99/296.20  all VarCurr (v4097(VarCurr)-> (v2327(VarCurr,bitIndex12)<->v2329(VarCurr))).
% 297.99/296.20  all VarCurr (v4097(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.99/296.20  all VarCurr (v963(VarCurr,bitIndex12)<->v2325(VarCurr,bitIndex12)).
% 297.99/296.20  all VarCurr (-v4095(VarCurr)-> (v965(VarCurr,bitIndex12)<->$F)).
% 297.99/296.20  all VarCurr (v4095(VarCurr)-> (v965(VarCurr,bitIndex12)<->v2201(VarCurr))).
% 297.99/296.20  all VarCurr (v4095(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.99/296.20  all VarCurr (v797(VarCurr,bitIndex13)<->v799(VarCurr,bitIndex13)).
% 297.99/296.20  all VarCurr (v799(VarCurr,bitIndex13)<->v2593(VarCurr,bitIndex13)).
% 297.99/296.20  all VarCurr (v801(VarCurr,bitIndex13)<->v2592(VarCurr,bitIndex13)).
% 297.99/296.20  all VarCurr (v2381(VarCurr,bitIndex13)<->v2383(VarCurr,bitIndex13)).
% 297.99/296.20  all VarCurr (v2383(VarCurr,bitIndex13)<->v2385(VarCurr,bitIndex13)).
% 297.99/296.20  all VarCurr (v2385(VarCurr,bitIndex13)<->v2387(VarCurr,bitIndex13)).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4086(VarNext)-> (v2387(VarNext,bitIndex13)<->v2387(VarCurr,bitIndex13)))).
% 297.99/296.20  all VarNext (v4086(VarNext)-> (v2387(VarNext,bitIndex13)<->v4088(VarNext))).
% 297.99/296.20  all VarCurr (v4088(VarCurr)<->v2389(VarCurr)&v4089(VarCurr)).
% 297.99/296.20  all VarCurr (v4089(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1682))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1681))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1680))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1679))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1678))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1677))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1676))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1675))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1674))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1673))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1672))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1671))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1670))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1669))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1668))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1667))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1666))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1665))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1664))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1663))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1662))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1661))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1660))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1659))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1658))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1657))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1656))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1655))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1654))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1653))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1652))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1651))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1650))).
% 297.99/296.20  all VarCurr (v4086(VarCurr)<->v4087(VarCurr)&v2585(VarCurr)).
% 297.99/296.20  all VarCurr (-v4087(VarCurr)<->v2547(VarCurr)).
% 297.99/296.20  all VarNext (v803(VarNext,bitIndex13)<->v4077(VarNext,bitIndex13)).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4079(VarNext)-> (all B (range_63_0(B)-> (v4077(VarNext,B)<->v803(VarCurr,B)))))).
% 297.99/296.20  all VarNext (v4079(VarNext)-> (all B (range_63_0(B)-> (v4077(VarNext,B)<->v2377(VarNext,B))))).
% 297.99/296.20  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4079(VarNext)<->v4080(VarNext)&v2370(VarNext))).
% 297.99/296.21  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4080(VarNext)<->v4082(VarNext)&v2343(VarNext))).
% 297.99/296.21  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4082(VarNext)<->v2350(VarNext))).
% 297.99/296.21  all VarNext (v959(VarNext,bitIndex13)<->v4069(VarNext,bitIndex13)).
% 297.99/296.21  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4070(VarNext)-> (all B (range_63_0(B)-> (v4069(VarNext,B)<->v959(VarCurr,B)))))).
% 297.99/296.21  all VarNext (v4070(VarNext)-> (all B (range_63_0(B)-> (v4069(VarNext,B)<->v2356(VarNext,B))))).
% 297.99/296.21  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4070(VarNext)<->v4071(VarNext))).
% 297.99/296.21  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4071(VarNext)<->v4073(VarNext)&v2343(VarNext))).
% 297.99/296.21  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4073(VarNext)<->v2350(VarNext))).
% 297.99/296.21  all VarCurr (v961(VarCurr,bitIndex13)<->v2339(VarCurr,bitIndex13)).
% 297.99/296.21  all VarCurr (-v4067(VarCurr)-> (v2327(VarCurr,bitIndex13)<->$F)).
% 297.99/296.21  all VarCurr (v4067(VarCurr)-> (v2327(VarCurr,bitIndex13)<->v2329(VarCurr))).
% 297.99/296.21  all VarCurr (v4067(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.99/296.21  all VarCurr (v963(VarCurr,bitIndex13)<->v2325(VarCurr,bitIndex13)).
% 297.99/296.21  all VarCurr (-v4065(VarCurr)-> (v965(VarCurr,bitIndex13)<->$F)).
% 297.99/296.21  all VarCurr (v4065(VarCurr)-> (v965(VarCurr,bitIndex13)<->v2201(VarCurr))).
% 297.99/296.21  all VarCurr (v4065(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.99/296.21  all VarCurr (v797(VarCurr,bitIndex14)<->v799(VarCurr,bitIndex14)).
% 297.99/296.21  all VarCurr (v799(VarCurr,bitIndex14)<->v2593(VarCurr,bitIndex14)).
% 297.99/296.21  all VarCurr (v801(VarCurr,bitIndex14)<->v2592(VarCurr,bitIndex14)).
% 297.99/296.21  all VarCurr (v2381(VarCurr,bitIndex14)<->v2383(VarCurr,bitIndex14)).
% 297.99/296.21  all VarCurr (v2383(VarCurr,bitIndex14)<->v2385(VarCurr,bitIndex14)).
% 297.99/296.21  all VarCurr (v2385(VarCurr,bitIndex14)<->v2387(VarCurr,bitIndex14)).
% 297.99/296.21  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4056(VarNext)-> (v2387(VarNext,bitIndex14)<->v2387(VarCurr,bitIndex14)))).
% 297.99/296.21  all VarNext (v4056(VarNext)-> (v2387(VarNext,bitIndex14)<->v4058(VarNext))).
% 297.99/296.21  all VarCurr (v4058(VarCurr)<->v2389(VarCurr)&v4059(VarCurr)).
% 297.99/296.21  all VarCurr (v4059(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1649))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1648))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1647))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1646))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1645))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1644))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1643))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1642))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1641))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1640))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1639))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1638))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1637))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1636))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1635))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1634))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1633))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1632))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1631))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1630))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1629))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1628))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1627))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1626))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1625))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1624))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1623))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1622))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1621))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1620))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1619))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1618))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1617))).
% 297.99/296.22  all VarCurr (v4056(VarCurr)<->v4057(VarCurr)&v2585(VarCurr)).
% 297.99/296.22  all VarCurr (-v4057(VarCurr)<->v2547(VarCurr)).
% 297.99/296.22  all VarNext (v803(VarNext,bitIndex14)<->v4047(VarNext,bitIndex14)).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4049(VarNext)-> (all B (range_63_0(B)-> (v4047(VarNext,B)<->v803(VarCurr,B)))))).
% 297.99/296.22  all VarNext (v4049(VarNext)-> (all B (range_63_0(B)-> (v4047(VarNext,B)<->v2377(VarNext,B))))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4049(VarNext)<->v4050(VarNext)&v2370(VarNext))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4050(VarNext)<->v4052(VarNext)&v2343(VarNext))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4052(VarNext)<->v2350(VarNext))).
% 297.99/296.22  all VarNext (v959(VarNext,bitIndex14)<->v4039(VarNext,bitIndex14)).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4040(VarNext)-> (all B (range_63_0(B)-> (v4039(VarNext,B)<->v959(VarCurr,B)))))).
% 297.99/296.22  all VarNext (v4040(VarNext)-> (all B (range_63_0(B)-> (v4039(VarNext,B)<->v2356(VarNext,B))))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4040(VarNext)<->v4041(VarNext))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4041(VarNext)<->v4043(VarNext)&v2343(VarNext))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4043(VarNext)<->v2350(VarNext))).
% 297.99/296.22  all VarCurr (v961(VarCurr,bitIndex14)<->v2339(VarCurr,bitIndex14)).
% 297.99/296.22  all VarCurr (-v4037(VarCurr)-> (v2327(VarCurr,bitIndex14)<->$F)).
% 297.99/296.22  all VarCurr (v4037(VarCurr)-> (v2327(VarCurr,bitIndex14)<->v2329(VarCurr))).
% 297.99/296.22  all VarCurr (v4037(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.99/296.22  all VarCurr (v963(VarCurr,bitIndex14)<->v2325(VarCurr,bitIndex14)).
% 297.99/296.22  all VarCurr (-v4035(VarCurr)-> (v965(VarCurr,bitIndex14)<->$F)).
% 297.99/296.22  all VarCurr (v4035(VarCurr)-> (v965(VarCurr,bitIndex14)<->v2201(VarCurr))).
% 297.99/296.22  all VarCurr (v4035(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.99/296.22  all VarCurr (v797(VarCurr,bitIndex15)<->v799(VarCurr,bitIndex15)).
% 297.99/296.22  all VarCurr (v799(VarCurr,bitIndex15)<->v2593(VarCurr,bitIndex15)).
% 297.99/296.22  all VarCurr (v801(VarCurr,bitIndex15)<->v2592(VarCurr,bitIndex15)).
% 297.99/296.22  all VarCurr (v2381(VarCurr,bitIndex15)<->v2383(VarCurr,bitIndex15)).
% 297.99/296.22  all VarCurr (v2383(VarCurr,bitIndex15)<->v2385(VarCurr,bitIndex15)).
% 297.99/296.22  all VarCurr (v2385(VarCurr,bitIndex15)<->v2387(VarCurr,bitIndex15)).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4026(VarNext)-> (v2387(VarNext,bitIndex15)<->v2387(VarCurr,bitIndex15)))).
% 297.99/296.22  all VarNext (v4026(VarNext)-> (v2387(VarNext,bitIndex15)<->v4028(VarNext))).
% 297.99/296.22  all VarCurr (v4028(VarCurr)<->v2389(VarCurr)&v4029(VarCurr)).
% 297.99/296.22  all VarCurr (v4029(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1616))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1615))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1614))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1613))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1612))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1611))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1610))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1609))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1608))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1607))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1606))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1605))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1604))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1603))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1602))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1601))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1600))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1599))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1598))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1597))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1596))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1595))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1594))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1593))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1592))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1591))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1590))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1589))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1588))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1587))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1586))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1585))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1584))).
% 297.99/296.22  all VarCurr (v4026(VarCurr)<->v4027(VarCurr)&v2585(VarCurr)).
% 297.99/296.22  all VarCurr (-v4027(VarCurr)<->v2547(VarCurr)).
% 297.99/296.22  all VarNext (v803(VarNext,bitIndex15)<->v4017(VarNext,bitIndex15)).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4019(VarNext)-> (all B (range_63_0(B)-> (v4017(VarNext,B)<->v803(VarCurr,B)))))).
% 297.99/296.22  all VarNext (v4019(VarNext)-> (all B (range_63_0(B)-> (v4017(VarNext,B)<->v2377(VarNext,B))))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4019(VarNext)<->v4020(VarNext)&v2370(VarNext))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4020(VarNext)<->v4022(VarNext)&v2343(VarNext))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4022(VarNext)<->v2350(VarNext))).
% 297.99/296.22  all VarNext (v959(VarNext,bitIndex15)<->v4009(VarNext,bitIndex15)).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4010(VarNext)-> (all B (range_63_0(B)-> (v4009(VarNext,B)<->v959(VarCurr,B)))))).
% 297.99/296.22  all VarNext (v4010(VarNext)-> (all B (range_63_0(B)-> (v4009(VarNext,B)<->v2356(VarNext,B))))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4010(VarNext)<->v4011(VarNext))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v4011(VarNext)<->v4013(VarNext)&v2343(VarNext))).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v4013(VarNext)<->v2350(VarNext))).
% 297.99/296.22  all VarCurr (v961(VarCurr,bitIndex15)<->v2339(VarCurr,bitIndex15)).
% 297.99/296.22  all VarCurr (-v4007(VarCurr)-> (v2327(VarCurr,bitIndex15)<->$F)).
% 297.99/296.22  all VarCurr (v4007(VarCurr)-> (v2327(VarCurr,bitIndex15)<->v2329(VarCurr))).
% 297.99/296.22  all VarCurr (v4007(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.99/296.22  all VarCurr (v963(VarCurr,bitIndex15)<->v2325(VarCurr,bitIndex15)).
% 297.99/296.22  all VarCurr (-v4005(VarCurr)-> (v965(VarCurr,bitIndex15)<->$F)).
% 297.99/296.22  all VarCurr (v4005(VarCurr)-> (v965(VarCurr,bitIndex15)<->v2201(VarCurr))).
% 297.99/296.22  all VarCurr (v4005(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.99/296.22  all VarCurr (v797(VarCurr,bitIndex16)<->v799(VarCurr,bitIndex16)).
% 297.99/296.22  all VarCurr (v799(VarCurr,bitIndex16)<->v2593(VarCurr,bitIndex16)).
% 297.99/296.22  all VarCurr (v801(VarCurr,bitIndex16)<->v2592(VarCurr,bitIndex16)).
% 297.99/296.22  all VarCurr (v2381(VarCurr,bitIndex16)<->v2383(VarCurr,bitIndex16)).
% 297.99/296.22  all VarCurr (v2383(VarCurr,bitIndex16)<->v2385(VarCurr,bitIndex16)).
% 297.99/296.22  all VarCurr (v2385(VarCurr,bitIndex16)<->v2387(VarCurr,bitIndex16)).
% 297.99/296.22  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3996(VarNext)-> (v2387(VarNext,bitIndex16)<->v2387(VarCurr,bitIndex16)))).
% 297.99/296.22  all VarNext (v3996(VarNext)-> (v2387(VarNext,bitIndex16)<->v3998(VarNext))).
% 297.99/296.22  all VarCurr (v3998(VarCurr)<->v2389(VarCurr)&v3999(VarCurr)).
% 297.99/296.22  all VarCurr (v3999(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1583))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1582))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1581))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1580))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1579))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1578))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1577))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1576))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1575))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1574))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1573))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1572))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1571))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1570))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1569))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1568))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1567))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1566))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1565))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1564))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1563))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1562))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1561))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1560))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1559))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1558))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1557))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1556))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1555))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1554))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1553))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1552))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1551))).
% 297.99/296.23  all VarCurr (v3996(VarCurr)<->v3997(VarCurr)&v2585(VarCurr)).
% 297.99/296.23  all VarCurr (-v3997(VarCurr)<->v2547(VarCurr)).
% 297.99/296.23  all VarNext (v803(VarNext,bitIndex16)<->v3987(VarNext,bitIndex16)).
% 297.99/296.23  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3989(VarNext)-> (all B (range_63_0(B)-> (v3987(VarNext,B)<->v803(VarCurr,B)))))).
% 297.99/296.23  all VarNext (v3989(VarNext)-> (all B (range_63_0(B)-> (v3987(VarNext,B)<->v2377(VarNext,B))))).
% 297.99/296.23  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3989(VarNext)<->v3990(VarNext)&v2370(VarNext))).
% 297.99/296.23  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3990(VarNext)<->v3992(VarNext)&v2343(VarNext))).
% 297.99/296.23  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3992(VarNext)<->v2350(VarNext))).
% 297.99/296.23  all VarNext (v959(VarNext,bitIndex16)<->v3979(VarNext,bitIndex16)).
% 297.99/296.23  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3980(VarNext)-> (all B (range_63_0(B)-> (v3979(VarNext,B)<->v959(VarCurr,B)))))).
% 297.99/296.23  all VarNext (v3980(VarNext)-> (all B (range_63_0(B)-> (v3979(VarNext,B)<->v2356(VarNext,B))))).
% 297.99/296.23  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3980(VarNext)<->v3981(VarNext))).
% 297.99/296.23  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3981(VarNext)<->v3983(VarNext)&v2343(VarNext))).
% 297.99/296.23  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3983(VarNext)<->v2350(VarNext))).
% 297.99/296.23  all VarCurr (v961(VarCurr,bitIndex16)<->v2339(VarCurr,bitIndex16)).
% 297.99/296.23  all VarCurr (-v3977(VarCurr)-> (v2327(VarCurr,bitIndex16)<->$F)).
% 297.99/296.23  all VarCurr (v3977(VarCurr)-> (v2327(VarCurr,bitIndex16)<->v2329(VarCurr))).
% 297.99/296.23  all VarCurr (v3977(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.99/296.23  all VarCurr (v963(VarCurr,bitIndex16)<->v2325(VarCurr,bitIndex16)).
% 297.99/296.23  all VarCurr (-v3975(VarCurr)-> (v965(VarCurr,bitIndex16)<->$F)).
% 297.99/296.23  all VarCurr (v3975(VarCurr)-> (v965(VarCurr,bitIndex16)<->v2201(VarCurr))).
% 297.99/296.23  all VarCurr (v3975(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 297.99/296.23  all VarCurr (v797(VarCurr,bitIndex17)<->v799(VarCurr,bitIndex17)).
% 297.99/296.23  all VarCurr (v799(VarCurr,bitIndex17)<->v2593(VarCurr,bitIndex17)).
% 297.99/296.23  all VarCurr (v801(VarCurr,bitIndex17)<->v2592(VarCurr,bitIndex17)).
% 297.99/296.23  all VarCurr (v2381(VarCurr,bitIndex17)<->v2383(VarCurr,bitIndex17)).
% 297.99/296.23  all VarCurr (v2383(VarCurr,bitIndex17)<->v2385(VarCurr,bitIndex17)).
% 297.99/296.23  all VarCurr (v2385(VarCurr,bitIndex17)<->v2387(VarCurr,bitIndex17)).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3966(VarNext)-> (v2387(VarNext,bitIndex17)<->v2387(VarCurr,bitIndex17)))).
% 297.99/296.24  all VarNext (v3966(VarNext)-> (v2387(VarNext,bitIndex17)<->v3968(VarNext))).
% 297.99/296.24  all VarCurr (v3968(VarCurr)<->v2389(VarCurr)&v3969(VarCurr)).
% 297.99/296.24  all VarCurr (v3969(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1550))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1549))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1548))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1547))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1546))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1545))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1544))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1543))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1542))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1541))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1540))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1539))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1538))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1537))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1536))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1535))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1534))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1533))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1532))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1531))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1530))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1529))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1528))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1527))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1526))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1525))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1524))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1523))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1522))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1521))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1520))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1519))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1518))).
% 297.99/296.24  all VarCurr (v3966(VarCurr)<->v3967(VarCurr)&v2585(VarCurr)).
% 297.99/296.24  all VarCurr (-v3967(VarCurr)<->v2547(VarCurr)).
% 297.99/296.24  all VarNext (v803(VarNext,bitIndex17)<->v3957(VarNext,bitIndex17)).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3959(VarNext)-> (all B (range_63_0(B)-> (v3957(VarNext,B)<->v803(VarCurr,B)))))).
% 297.99/296.24  all VarNext (v3959(VarNext)-> (all B (range_63_0(B)-> (v3957(VarNext,B)<->v2377(VarNext,B))))).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3959(VarNext)<->v3960(VarNext)&v2370(VarNext))).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3960(VarNext)<->v3962(VarNext)&v2343(VarNext))).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3962(VarNext)<->v2350(VarNext))).
% 297.99/296.24  all VarNext (v959(VarNext,bitIndex17)<->v3949(VarNext,bitIndex17)).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3950(VarNext)-> (all B (range_63_0(B)-> (v3949(VarNext,B)<->v959(VarCurr,B)))))).
% 297.99/296.24  all VarNext (v3950(VarNext)-> (all B (range_63_0(B)-> (v3949(VarNext,B)<->v2356(VarNext,B))))).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3950(VarNext)<->v3951(VarNext))).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3951(VarNext)<->v3953(VarNext)&v2343(VarNext))).
% 297.99/296.24  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3953(VarNext)<->v2350(VarNext))).
% 297.99/296.24  all VarCurr (v961(VarCurr,bitIndex17)<->v2339(VarCurr,bitIndex17)).
% 297.99/296.24  all VarCurr (-v3947(VarCurr)-> (v2327(VarCurr,bitIndex17)<->$F)).
% 297.99/296.24  all VarCurr (v3947(VarCurr)-> (v2327(VarCurr,bitIndex17)<->v2329(VarCurr))).
% 297.99/296.24  all VarCurr (v3947(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 297.99/296.24  all VarCurr (v963(VarCurr,bitIndex17)<->v2325(VarCurr,bitIndex17)).
% 297.99/296.24  all VarCurr (-v3945(VarCurr)-> (v965(VarCurr,bitIndex17)<->$F)).
% 298.05/296.25  all VarCurr (v3945(VarCurr)-> (v965(VarCurr,bitIndex17)<->v2201(VarCurr))).
% 298.05/296.25  all VarCurr (v3945(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.05/296.25  all VarCurr (v797(VarCurr,bitIndex18)<->v799(VarCurr,bitIndex18)).
% 298.05/296.25  all VarCurr (v799(VarCurr,bitIndex18)<->v2593(VarCurr,bitIndex18)).
% 298.05/296.25  all VarCurr (v801(VarCurr,bitIndex18)<->v2592(VarCurr,bitIndex18)).
% 298.05/296.25  all VarCurr (v2381(VarCurr,bitIndex18)<->v2383(VarCurr,bitIndex18)).
% 298.05/296.25  all VarCurr (v2383(VarCurr,bitIndex18)<->v2385(VarCurr,bitIndex18)).
% 298.05/296.25  all VarCurr (v2385(VarCurr,bitIndex18)<->v2387(VarCurr,bitIndex18)).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3936(VarNext)-> (v2387(VarNext,bitIndex18)<->v2387(VarCurr,bitIndex18)))).
% 298.05/296.25  all VarNext (v3936(VarNext)-> (v2387(VarNext,bitIndex18)<->v3938(VarNext))).
% 298.05/296.25  all VarCurr (v3938(VarCurr)<->v2389(VarCurr)&v3939(VarCurr)).
% 298.05/296.25  all VarCurr (v3939(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1517))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1516))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1515))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1514))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1513))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1512))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1511))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1510))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1509))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1508))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1507))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1506))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1505))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1504))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1503))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1502))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1501))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1500))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1499))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1498))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1497))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1496))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1495))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1494))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1493))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1492))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1491))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1490))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1489))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1488))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1487))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1486))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1485))).
% 298.05/296.25  all VarCurr (v3936(VarCurr)<->v3937(VarCurr)&v2585(VarCurr)).
% 298.05/296.25  all VarCurr (-v3937(VarCurr)<->v2547(VarCurr)).
% 298.05/296.25  all VarNext (v803(VarNext,bitIndex18)<->v3927(VarNext,bitIndex18)).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3929(VarNext)-> (all B (range_63_0(B)-> (v3927(VarNext,B)<->v803(VarCurr,B)))))).
% 298.05/296.25  all VarNext (v3929(VarNext)-> (all B (range_63_0(B)-> (v3927(VarNext,B)<->v2377(VarNext,B))))).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3929(VarNext)<->v3930(VarNext)&v2370(VarNext))).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3930(VarNext)<->v3932(VarNext)&v2343(VarNext))).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3932(VarNext)<->v2350(VarNext))).
% 298.05/296.25  all VarNext (v959(VarNext,bitIndex18)<->v3919(VarNext,bitIndex18)).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3920(VarNext)-> (all B (range_63_0(B)-> (v3919(VarNext,B)<->v959(VarCurr,B)))))).
% 298.05/296.25  all VarNext (v3920(VarNext)-> (all B (range_63_0(B)-> (v3919(VarNext,B)<->v2356(VarNext,B))))).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3920(VarNext)<->v3921(VarNext))).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3921(VarNext)<->v3923(VarNext)&v2343(VarNext))).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3923(VarNext)<->v2350(VarNext))).
% 298.05/296.25  all VarCurr (v961(VarCurr,bitIndex18)<->v2339(VarCurr,bitIndex18)).
% 298.05/296.25  all VarCurr (-v3917(VarCurr)-> (v2327(VarCurr,bitIndex18)<->$F)).
% 298.05/296.25  all VarCurr (v3917(VarCurr)-> (v2327(VarCurr,bitIndex18)<->v2329(VarCurr))).
% 298.05/296.25  all VarCurr (v3917(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.05/296.25  all VarCurr (v963(VarCurr,bitIndex18)<->v2325(VarCurr,bitIndex18)).
% 298.05/296.25  all VarCurr (-v3915(VarCurr)-> (v965(VarCurr,bitIndex18)<->$F)).
% 298.05/296.25  all VarCurr (v3915(VarCurr)-> (v965(VarCurr,bitIndex18)<->v2201(VarCurr))).
% 298.05/296.25  all VarCurr (v3915(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.05/296.25  all VarCurr (v797(VarCurr,bitIndex19)<->v799(VarCurr,bitIndex19)).
% 298.05/296.25  all VarCurr (v799(VarCurr,bitIndex19)<->v2593(VarCurr,bitIndex19)).
% 298.05/296.25  all VarCurr (v801(VarCurr,bitIndex19)<->v2592(VarCurr,bitIndex19)).
% 298.05/296.25  all VarCurr (v2381(VarCurr,bitIndex19)<->v2383(VarCurr,bitIndex19)).
% 298.05/296.25  all VarCurr (v2383(VarCurr,bitIndex19)<->v2385(VarCurr,bitIndex19)).
% 298.05/296.25  all VarCurr (v2385(VarCurr,bitIndex19)<->v2387(VarCurr,bitIndex19)).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3906(VarNext)-> (v2387(VarNext,bitIndex19)<->v2387(VarCurr,bitIndex19)))).
% 298.05/296.25  all VarNext (v3906(VarNext)-> (v2387(VarNext,bitIndex19)<->v3908(VarNext))).
% 298.05/296.25  all VarCurr (v3908(VarCurr)<->v2389(VarCurr)&v3909(VarCurr)).
% 298.05/296.25  all VarCurr (v3909(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1484))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1483))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1482))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1481))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1480))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1479))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1478))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1477))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1476))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1475))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1474))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1473))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1472))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1471))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1470))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1469))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1468))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1467))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1466))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1465))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1464))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1463))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1462))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1461))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1460))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1459))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1458))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1457))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1456))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1455))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1454))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1453))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1452))).
% 298.05/296.25  all VarCurr (v3906(VarCurr)<->v3907(VarCurr)&v2585(VarCurr)).
% 298.05/296.25  all VarCurr (-v3907(VarCurr)<->v2547(VarCurr)).
% 298.05/296.25  all VarNext (v803(VarNext,bitIndex19)<->v3897(VarNext,bitIndex19)).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3899(VarNext)-> (all B (range_63_0(B)-> (v3897(VarNext,B)<->v803(VarCurr,B)))))).
% 298.05/296.25  all VarNext (v3899(VarNext)-> (all B (range_63_0(B)-> (v3897(VarNext,B)<->v2377(VarNext,B))))).
% 298.05/296.25  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3899(VarNext)<->v3900(VarNext)&v2370(VarNext))).
% 298.05/296.26  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3900(VarNext)<->v3902(VarNext)&v2343(VarNext))).
% 298.05/296.26  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3902(VarNext)<->v2350(VarNext))).
% 298.05/296.26  all VarNext (v959(VarNext,bitIndex19)<->v3889(VarNext,bitIndex19)).
% 298.05/296.26  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3890(VarNext)-> (all B (range_63_0(B)-> (v3889(VarNext,B)<->v959(VarCurr,B)))))).
% 298.05/296.26  all VarNext (v3890(VarNext)-> (all B (range_63_0(B)-> (v3889(VarNext,B)<->v2356(VarNext,B))))).
% 298.05/296.26  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3890(VarNext)<->v3891(VarNext))).
% 298.05/296.26  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3891(VarNext)<->v3893(VarNext)&v2343(VarNext))).
% 298.05/296.26  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3893(VarNext)<->v2350(VarNext))).
% 298.05/296.26  all VarCurr (v961(VarCurr,bitIndex19)<->v2339(VarCurr,bitIndex19)).
% 298.05/296.26  all VarCurr (-v3887(VarCurr)-> (v2327(VarCurr,bitIndex19)<->$F)).
% 298.05/296.26  all VarCurr (v3887(VarCurr)-> (v2327(VarCurr,bitIndex19)<->v2329(VarCurr))).
% 298.05/296.26  all VarCurr (v3887(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.05/296.26  all VarCurr (v963(VarCurr,bitIndex19)<->v2325(VarCurr,bitIndex19)).
% 298.05/296.26  all VarCurr (-v3885(VarCurr)-> (v965(VarCurr,bitIndex19)<->$F)).
% 298.05/296.26  all VarCurr (v3885(VarCurr)-> (v965(VarCurr,bitIndex19)<->v2201(VarCurr))).
% 298.05/296.26  all VarCurr (v3885(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.05/296.26  all VarCurr (v797(VarCurr,bitIndex20)<->v799(VarCurr,bitIndex20)).
% 298.05/296.26  all VarCurr (v799(VarCurr,bitIndex20)<->v2593(VarCurr,bitIndex20)).
% 298.05/296.26  all VarCurr (v801(VarCurr,bitIndex20)<->v2592(VarCurr,bitIndex20)).
% 298.05/296.26  all VarCurr (v2381(VarCurr,bitIndex20)<->v2383(VarCurr,bitIndex20)).
% 298.05/296.26  all VarCurr (v2383(VarCurr,bitIndex20)<->v2385(VarCurr,bitIndex20)).
% 298.05/296.26  all VarCurr (v2385(VarCurr,bitIndex20)<->v2387(VarCurr,bitIndex20)).
% 298.05/296.26  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3876(VarNext)-> (v2387(VarNext,bitIndex20)<->v2387(VarCurr,bitIndex20)))).
% 298.05/296.26  all VarNext (v3876(VarNext)-> (v2387(VarNext,bitIndex20)<->v3878(VarNext))).
% 298.05/296.26  all VarCurr (v3878(VarCurr)<->v2389(VarCurr)&v3879(VarCurr)).
% 298.05/296.26  all VarCurr (v3879(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1451))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1450))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1449))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1448))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1447))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1446))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1445))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1444))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1443))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1442))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1441))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1440))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1439))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1438))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1437))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1436))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1435))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1434))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1433))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1432))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1431))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1430))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1429))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1428))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1427))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1426))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1425))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1424))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1423))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1422))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1421))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1420))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1419))).
% 298.07/296.27  all VarCurr (v3876(VarCurr)<->v3877(VarCurr)&v2585(VarCurr)).
% 298.07/296.27  all VarCurr (-v3877(VarCurr)<->v2547(VarCurr)).
% 298.07/296.27  all VarNext (v803(VarNext,bitIndex20)<->v3867(VarNext,bitIndex20)).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3869(VarNext)-> (all B (range_63_0(B)-> (v3867(VarNext,B)<->v803(VarCurr,B)))))).
% 298.07/296.27  all VarNext (v3869(VarNext)-> (all B (range_63_0(B)-> (v3867(VarNext,B)<->v2377(VarNext,B))))).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3869(VarNext)<->v3870(VarNext)&v2370(VarNext))).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3870(VarNext)<->v3872(VarNext)&v2343(VarNext))).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3872(VarNext)<->v2350(VarNext))).
% 298.07/296.27  all VarNext (v959(VarNext,bitIndex20)<->v3859(VarNext,bitIndex20)).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3860(VarNext)-> (all B (range_63_0(B)-> (v3859(VarNext,B)<->v959(VarCurr,B)))))).
% 298.07/296.27  all VarNext (v3860(VarNext)-> (all B (range_63_0(B)-> (v3859(VarNext,B)<->v2356(VarNext,B))))).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3860(VarNext)<->v3861(VarNext))).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3861(VarNext)<->v3863(VarNext)&v2343(VarNext))).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3863(VarNext)<->v2350(VarNext))).
% 298.07/296.27  all VarCurr (v961(VarCurr,bitIndex20)<->v2339(VarCurr,bitIndex20)).
% 298.07/296.27  all VarCurr (-v3857(VarCurr)-> (v2327(VarCurr,bitIndex20)<->$F)).
% 298.07/296.27  all VarCurr (v3857(VarCurr)-> (v2327(VarCurr,bitIndex20)<->v2329(VarCurr))).
% 298.07/296.27  all VarCurr (v3857(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.07/296.27  all VarCurr (v963(VarCurr,bitIndex20)<->v2325(VarCurr,bitIndex20)).
% 298.07/296.27  all VarCurr (-v3855(VarCurr)-> (v965(VarCurr,bitIndex20)<->$F)).
% 298.07/296.27  all VarCurr (v3855(VarCurr)-> (v965(VarCurr,bitIndex20)<->v2201(VarCurr))).
% 298.07/296.27  all VarCurr (v3855(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
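The two clause pairs just above follow a template that repeats once per bit index through this stretch of the formula list: bit b of a bus (v965, and likewise v2327) is driven by a shared source (v2201, resp. v2329) exactly when the six bits of v967 spell out b in binary (here $F,$T,$F,$T,$F,$F from bitIndex5 down to bitIndex0, i.e. 0b010100 = 20), and is forced to $F otherwise. A minimal Python sketch of that demultiplexer reading; all names (demux_bit, sel, src, width) are ours, not the problem's, and the bus width is an assumption:

    # Hypothetical reading of the v3855/v965 clause pairs: one output
    # bit follows the source per selector value, 0 everywhere else.
    def demux_bit(sel: int, src: bool, b: int) -> bool:
        # v3855-style guard: v967 must equal the binary encoding of b,
        # e.g. 0b010100 == 20 for bitIndex20.
        return src if sel == b else False

    def demux_bus(sel: int, src: bool, width: int = 33) -> list:
        # One guarded clause pair per bit index, as in the listing;
        # width 33 is assumed, not stated in the clauses.
        return [demux_bit(sel, src, b) for b in range(width)]

    assert demux_bus(20, True)[20] is True   # selected bit follows src
    assert demux_bus(20, True)[21] is False  # every other bit is $F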
% 298.07/296.27  all VarCurr (v797(VarCurr,bitIndex21)<->v799(VarCurr,bitIndex21)).
% 298.07/296.27  all VarCurr (v799(VarCurr,bitIndex21)<->v2593(VarCurr,bitIndex21)).
% 298.07/296.27  all VarCurr (v801(VarCurr,bitIndex21)<->v2592(VarCurr,bitIndex21)).
% 298.07/296.27  all VarCurr (v2381(VarCurr,bitIndex21)<->v2383(VarCurr,bitIndex21)).
% 298.07/296.27  all VarCurr (v2383(VarCurr,bitIndex21)<->v2385(VarCurr,bitIndex21)).
% 298.07/296.27  all VarCurr (v2385(VarCurr,bitIndex21)<->v2387(VarCurr,bitIndex21)).
% 298.07/296.27  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3846(VarNext)-> (v2387(VarNext,bitIndex21)<->v2387(VarCurr,bitIndex21)))).
% 298.07/296.27  all VarNext (v3846(VarNext)-> (v2387(VarNext,bitIndex21)<->v3848(VarNext))).
% 298.07/296.27  all VarCurr (v3848(VarCurr)<->v2389(VarCurr)&v3849(VarCurr)).
% 298.07/296.27  all VarCurr (v3849(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1418))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1417))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1416))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1415))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1414))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1413))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1412))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1411))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1410))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1409))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1408))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1407))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1406))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1405))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1404))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1403))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1402))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1401))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1400))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1399))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1398))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1397))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1396))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1395))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1394))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1393))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1392))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1391))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1390))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1389))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1388))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1387))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1386))).
% 298.07/296.28  all VarCurr (v3846(VarCurr)<->v3847(VarCurr)&v2585(VarCurr)).
% 298.07/296.28  all VarCurr (-v3847(VarCurr)<->v2547(VarCurr)).
% 298.07/296.28  all VarNext (v803(VarNext,bitIndex21)<->v3837(VarNext,bitIndex21)).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3839(VarNext)-> (all B (range_63_0(B)-> (v3837(VarNext,B)<->v803(VarCurr,B)))))).
% 298.07/296.28  all VarNext (v3839(VarNext)-> (all B (range_63_0(B)-> (v3837(VarNext,B)<->v2377(VarNext,B))))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3839(VarNext)<->v3840(VarNext)&v2370(VarNext))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3840(VarNext)<->v3842(VarNext)&v2343(VarNext))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3842(VarNext)<->v2350(VarNext))).
% 298.07/296.28  all VarNext (v959(VarNext,bitIndex21)<->v3829(VarNext,bitIndex21)).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3830(VarNext)-> (all B (range_63_0(B)-> (v3829(VarNext,B)<->v959(VarCurr,B)))))).
% 298.07/296.28  all VarNext (v3830(VarNext)-> (all B (range_63_0(B)-> (v3829(VarNext,B)<->v2356(VarNext,B))))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3830(VarNext)<->v3831(VarNext))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3831(VarNext)<->v3833(VarNext)&v2343(VarNext))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3833(VarNext)<->v2350(VarNext))).
% 298.07/296.28  all VarCurr (v961(VarCurr,bitIndex21)<->v2339(VarCurr,bitIndex21)).
% 298.07/296.28  all VarCurr (-v3827(VarCurr)-> (v2327(VarCurr,bitIndex21)<->$F)).
% 298.07/296.28  all VarCurr (v3827(VarCurr)-> (v2327(VarCurr,bitIndex21)<->v2329(VarCurr))).
% 298.07/296.28  all VarCurr (v3827(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.07/296.28  all VarCurr (v963(VarCurr,bitIndex21)<->v2325(VarCurr,bitIndex21)).
% 298.07/296.28  all VarCurr (-v3825(VarCurr)-> (v965(VarCurr,bitIndex21)<->$F)).
% 298.07/296.28  all VarCurr (v3825(VarCurr)-> (v965(VarCurr,bitIndex21)<->v2201(VarCurr))).
% 298.07/296.28  all VarCurr (v3825(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.07/296.28  all VarCurr (v797(VarCurr,bitIndex22)<->v799(VarCurr,bitIndex22)).
% 298.07/296.28  all VarCurr (v799(VarCurr,bitIndex22)<->v2593(VarCurr,bitIndex22)).
% 298.07/296.28  all VarCurr (v801(VarCurr,bitIndex22)<->v2592(VarCurr,bitIndex22)).
% 298.07/296.28  all VarCurr (v2381(VarCurr,bitIndex22)<->v2383(VarCurr,bitIndex22)).
% 298.07/296.28  all VarCurr (v2383(VarCurr,bitIndex22)<->v2385(VarCurr,bitIndex22)).
% 298.07/296.28  all VarCurr (v2385(VarCurr,bitIndex22)<->v2387(VarCurr,bitIndex22)).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3816(VarNext)-> (v2387(VarNext,bitIndex22)<->v2387(VarCurr,bitIndex22)))).
% 298.07/296.28  all VarNext (v3816(VarNext)-> (v2387(VarNext,bitIndex22)<->v3818(VarNext))).
% 298.07/296.28  all VarCurr (v3818(VarCurr)<->v2389(VarCurr)&v3819(VarCurr)).
% 298.07/296.28  all VarCurr (v3819(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1385))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1384))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1383))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1382))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1381))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1380))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1379))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1378))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1377))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1376))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1375))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1374))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1373))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1372))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1371))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1370))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1369))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1368))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1367))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1366))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1365))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1364))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1363))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1362))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1361))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1360))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1359))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1358))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1357))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1356))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1355))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1354))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1353))).
% 298.07/296.28  all VarCurr (v3816(VarCurr)<->v3817(VarCurr)&v2585(VarCurr)).
% 298.07/296.28  all VarCurr (-v3817(VarCurr)<->v2547(VarCurr)).
% 298.07/296.28  all VarNext (v803(VarNext,bitIndex22)<->v3807(VarNext,bitIndex22)).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3809(VarNext)-> (all B (range_63_0(B)-> (v3807(VarNext,B)<->v803(VarCurr,B)))))).
% 298.07/296.28  all VarNext (v3809(VarNext)-> (all B (range_63_0(B)-> (v3807(VarNext,B)<->v2377(VarNext,B))))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3809(VarNext)<->v3810(VarNext)&v2370(VarNext))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3810(VarNext)<->v3812(VarNext)&v2343(VarNext))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3812(VarNext)<->v2350(VarNext))).
% 298.07/296.28  all VarNext (v959(VarNext,bitIndex22)<->v3799(VarNext,bitIndex22)).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3800(VarNext)-> (all B (range_63_0(B)-> (v3799(VarNext,B)<->v959(VarCurr,B)))))).
% 298.07/296.28  all VarNext (v3800(VarNext)-> (all B (range_63_0(B)-> (v3799(VarNext,B)<->v2356(VarNext,B))))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3800(VarNext)<->v3801(VarNext))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3801(VarNext)<->v3803(VarNext)&v2343(VarNext))).
% 298.07/296.28  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3803(VarNext)<->v2350(VarNext))).
% 298.07/296.28  all VarCurr (v961(VarCurr,bitIndex22)<->v2339(VarCurr,bitIndex22)).
% 298.07/296.28  all VarCurr (-v3797(VarCurr)-> (v2327(VarCurr,bitIndex22)<->$F)).
% 298.07/296.28  all VarCurr (v3797(VarCurr)-> (v2327(VarCurr,bitIndex22)<->v2329(VarCurr))).
% 298.07/296.28  all VarCurr (v3797(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.07/296.28  all VarCurr (v963(VarCurr,bitIndex22)<->v2325(VarCurr,bitIndex22)).
% 298.07/296.28  all VarCurr (-v3795(VarCurr)-> (v965(VarCurr,bitIndex22)<->$F)).
% 298.07/296.28  all VarCurr (v3795(VarCurr)-> (v965(VarCurr,bitIndex22)<->v2201(VarCurr))).
% 298.07/296.28  all VarCurr (v3795(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.07/296.28  all VarCurr (v797(VarCurr,bitIndex23)<->v799(VarCurr,bitIndex23)).
% 298.07/296.28  all VarCurr (v799(VarCurr,bitIndex23)<->v2593(VarCurr,bitIndex23)).
% 298.07/296.28  all VarCurr (v801(VarCurr,bitIndex23)<->v2592(VarCurr,bitIndex23)).
% 298.07/296.28  all VarCurr (v2381(VarCurr,bitIndex23)<->v2383(VarCurr,bitIndex23)).
% 298.07/296.28  all VarCurr (v2383(VarCurr,bitIndex23)<->v2385(VarCurr,bitIndex23)).
% 298.07/296.28  all VarCurr (v2385(VarCurr,bitIndex23)<->v2387(VarCurr,bitIndex23)).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3786(VarNext)-> (v2387(VarNext,bitIndex23)<->v2387(VarCurr,bitIndex23)))).
% 298.07/296.29  all VarNext (v3786(VarNext)-> (v2387(VarNext,bitIndex23)<->v3788(VarNext))).
% 298.07/296.29  all VarCurr (v3788(VarCurr)<->v2389(VarCurr)&v3789(VarCurr)).
% 298.07/296.29  all VarCurr (v3789(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1352))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1351))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1350))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1349))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1348))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1347))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1346))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1345))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1344))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1343))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1342))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1341))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1340))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1339))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1338))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1337))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1336))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1335))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1334))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1333))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1332))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1331))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1330))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1329))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1328))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1327))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1326))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1325))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1324))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1323))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1322))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1321))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1320))).
% 298.07/296.29  all VarCurr (v3786(VarCurr)<->v3787(VarCurr)&v2585(VarCurr)).
% 298.07/296.29  all VarCurr (-v3787(VarCurr)<->v2547(VarCurr)).
% 298.07/296.29  all VarNext (v803(VarNext,bitIndex23)<->v3777(VarNext,bitIndex23)).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3779(VarNext)-> (all B (range_63_0(B)-> (v3777(VarNext,B)<->v803(VarCurr,B)))))).
% 298.07/296.29  all VarNext (v3779(VarNext)-> (all B (range_63_0(B)-> (v3777(VarNext,B)<->v2377(VarNext,B))))).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3779(VarNext)<->v3780(VarNext)&v2370(VarNext))).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3780(VarNext)<->v3782(VarNext)&v2343(VarNext))).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3782(VarNext)<->v2350(VarNext))).
% 298.07/296.29  all VarNext (v959(VarNext,bitIndex23)<->v3769(VarNext,bitIndex23)).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3770(VarNext)-> (all B (range_63_0(B)-> (v3769(VarNext,B)<->v959(VarCurr,B)))))).
% 298.07/296.29  all VarNext (v3770(VarNext)-> (all B (range_63_0(B)-> (v3769(VarNext,B)<->v2356(VarNext,B))))).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3770(VarNext)<->v3771(VarNext))).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3771(VarNext)<->v3773(VarNext)&v2343(VarNext))).
% 298.07/296.29  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3773(VarNext)<->v2350(VarNext))).
% 298.07/296.29  all VarCurr (v961(VarCurr,bitIndex23)<->v2339(VarCurr,bitIndex23)).
% 298.07/296.29  all VarCurr (-v3767(VarCurr)-> (v2327(VarCurr,bitIndex23)<->$F)).
% 298.07/296.29  all VarCurr (v3767(VarCurr)-> (v2327(VarCurr,bitIndex23)<->v2329(VarCurr))).
% 298.07/296.29  all VarCurr (v3767(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.07/296.29  all VarCurr (v963(VarCurr,bitIndex23)<->v2325(VarCurr,bitIndex23)).
% 298.07/296.29  all VarCurr (-v3765(VarCurr)-> (v965(VarCurr,bitIndex23)<->$F)).
% 298.07/296.30  all VarCurr (v3765(VarCurr)-> (v965(VarCurr,bitIndex23)<->v2201(VarCurr))).
% 298.07/296.30  all VarCurr (v3765(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.07/296.30  all VarCurr (v797(VarCurr,bitIndex24)<->v799(VarCurr,bitIndex24)).
% 298.07/296.30  all VarCurr (v799(VarCurr,bitIndex24)<->v2593(VarCurr,bitIndex24)).
% 298.07/296.30  all VarCurr (v801(VarCurr,bitIndex24)<->v2592(VarCurr,bitIndex24)).
% 298.07/296.30  all VarCurr (v2381(VarCurr,bitIndex24)<->v2383(VarCurr,bitIndex24)).
% 298.07/296.30  all VarCurr (v2383(VarCurr,bitIndex24)<->v2385(VarCurr,bitIndex24)).
% 298.07/296.30  all VarCurr (v2385(VarCurr,bitIndex24)<->v2387(VarCurr,bitIndex24)).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3756(VarNext)-> (v2387(VarNext,bitIndex24)<->v2387(VarCurr,bitIndex24)))).
% 298.07/296.30  all VarNext (v3756(VarNext)-> (v2387(VarNext,bitIndex24)<->v3758(VarNext))).
% 298.07/296.30  all VarCurr (v3758(VarCurr)<->v2389(VarCurr)&v3759(VarCurr)).
% 298.07/296.30  all VarCurr (v3759(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1319))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1318))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1317))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1316))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1315))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1314))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1313))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1312))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1311))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1310))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1309))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1308))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1307))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1306))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1305))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1304))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1303))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1302))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1301))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1300))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1299))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1298))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1297))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1296))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1295))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1294))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1293))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1292))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1291))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1290))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1289))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1288))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1287))).
% 298.07/296.30  all VarCurr (v3756(VarCurr)<->v3757(VarCurr)&v2585(VarCurr)).
% 298.07/296.30  all VarCurr (-v3757(VarCurr)<->v2547(VarCurr)).
% 298.07/296.30  all VarNext (v803(VarNext,bitIndex24)<->v3747(VarNext,bitIndex24)).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3749(VarNext)-> (all B (range_63_0(B)-> (v3747(VarNext,B)<->v803(VarCurr,B)))))).
% 298.07/296.30  all VarNext (v3749(VarNext)-> (all B (range_63_0(B)-> (v3747(VarNext,B)<->v2377(VarNext,B))))).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3749(VarNext)<->v3750(VarNext)&v2370(VarNext))).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3750(VarNext)<->v3752(VarNext)&v2343(VarNext))).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3752(VarNext)<->v2350(VarNext))).
% 298.07/296.30  all VarNext (v959(VarNext,bitIndex24)<->v3739(VarNext,bitIndex24)).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3740(VarNext)-> (all B (range_63_0(B)-> (v3739(VarNext,B)<->v959(VarCurr,B)))))).
% 298.07/296.30  all VarNext (v3740(VarNext)-> (all B (range_63_0(B)-> (v3739(VarNext,B)<->v2356(VarNext,B))))).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3740(VarNext)<->v3741(VarNext))).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3741(VarNext)<->v3743(VarNext)&v2343(VarNext))).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3743(VarNext)<->v2350(VarNext))).
% 298.07/296.30  all VarCurr (v961(VarCurr,bitIndex24)<->v2339(VarCurr,bitIndex24)).
% 298.07/296.30  all VarCurr (-v3737(VarCurr)-> (v2327(VarCurr,bitIndex24)<->$F)).
% 298.07/296.30  all VarCurr (v3737(VarCurr)-> (v2327(VarCurr,bitIndex24)<->v2329(VarCurr))).
% 298.07/296.30  all VarCurr (v3737(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.07/296.30  all VarCurr (v963(VarCurr,bitIndex24)<->v2325(VarCurr,bitIndex24)).
% 298.07/296.30  all VarCurr (-v3735(VarCurr)-> (v965(VarCurr,bitIndex24)<->$F)).
% 298.07/296.30  all VarCurr (v3735(VarCurr)-> (v965(VarCurr,bitIndex24)<->v2201(VarCurr))).
% 298.07/296.30  all VarCurr (v3735(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
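Each block also carries a guarded next-state clause for one bit of v2387 (v3756/v3758/v3759 in the bitIndex24 block above). Read as an enabled flip-flop: when the enable (v2585 conjoined with the negation of v2547) is low the bit holds, and when it is high the bit loads v2389 ANDed with a 33-bit equality test between v2409 and a window of v2545. The window indices visible in the clauses slide down by 33 per bit index: low = 2079 - 33*b, so bit 20 compares against v2545 bits 1419..1451, bit 24 against 1287..1319, bit 32 against 1023..1055. A minimal sketch under those naming assumptions (the vNNNN parameter names mirror the clause symbols; the decoding is ours):

    # Sketch of the per-bit v2387 update; cur_bit is the current value,
    # v2409 a 33-element bit list, v2545 a longer bit list.
    def next_v2387_bit(b, cur_bit, v2585, v2547, v2389, v2409, v2545):
        enable = (not v2547) and v2585        # v3756 = v3757 & v2585, -v3757 <-> v2547
        if not enable:
            return cur_bit                    # hold: v2387(next,b) <-> v2387(cur,b)
        low = 2079 - 33 * b                   # read off the bitIndex arithmetic above
        window = v2545[low:low + 33]          # the 33 bits compared pairwise in v3759
        return v2389 and (v2409 == window)    # v3758 = v2389 & v3759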
% 298.07/296.30  all VarCurr (v797(VarCurr,bitIndex25)<->v799(VarCurr,bitIndex25)).
% 298.07/296.30  all VarCurr (v799(VarCurr,bitIndex25)<->v2593(VarCurr,bitIndex25)).
% 298.07/296.30  all VarCurr (v801(VarCurr,bitIndex25)<->v2592(VarCurr,bitIndex25)).
% 298.07/296.30  all VarCurr (v2381(VarCurr,bitIndex25)<->v2383(VarCurr,bitIndex25)).
% 298.07/296.30  all VarCurr (v2383(VarCurr,bitIndex25)<->v2385(VarCurr,bitIndex25)).
% 298.07/296.30  all VarCurr (v2385(VarCurr,bitIndex25)<->v2387(VarCurr,bitIndex25)).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3726(VarNext)-> (v2387(VarNext,bitIndex25)<->v2387(VarCurr,bitIndex25)))).
% 298.07/296.30  all VarNext (v3726(VarNext)-> (v2387(VarNext,bitIndex25)<->v3728(VarNext))).
% 298.07/296.30  all VarCurr (v3728(VarCurr)<->v2389(VarCurr)&v3729(VarCurr)).
% 298.07/296.30  all VarCurr (v3729(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1286))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1285))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1284))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1283))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1282))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1281))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1280))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1279))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1278))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1277))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1276))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1275))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1274))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1273))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1272))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1271))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1270))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1269))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1268))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1267))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1266))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1265))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1264))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1263))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1262))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1261))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1260))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1259))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1258))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1257))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1256))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1255))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1254))).
% 298.07/296.30  all VarCurr (v3726(VarCurr)<->v3727(VarCurr)&v2585(VarCurr)).
% 298.07/296.30  all VarCurr (-v3727(VarCurr)<->v2547(VarCurr)).
% 298.07/296.30  all VarNext (v803(VarNext,bitIndex25)<->v3717(VarNext,bitIndex25)).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3719(VarNext)-> (all B (range_63_0(B)-> (v3717(VarNext,B)<->v803(VarCurr,B)))))).
% 298.07/296.30  all VarNext (v3719(VarNext)-> (all B (range_63_0(B)-> (v3717(VarNext,B)<->v2377(VarNext,B))))).
% 298.07/296.30  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3719(VarNext)<->v3720(VarNext)&v2370(VarNext))).
% 298.07/296.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3720(VarNext)<->v3722(VarNext)&v2343(VarNext))).
% 298.07/296.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3722(VarNext)<->v2350(VarNext))).
% 298.07/296.31  all VarNext (v959(VarNext,bitIndex25)<->v3709(VarNext,bitIndex25)).
% 298.07/296.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3710(VarNext)-> (all B (range_63_0(B)-> (v3709(VarNext,B)<->v959(VarCurr,B)))))).
% 298.07/296.31  all VarNext (v3710(VarNext)-> (all B (range_63_0(B)-> (v3709(VarNext,B)<->v2356(VarNext,B))))).
% 298.07/296.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3710(VarNext)<->v3711(VarNext))).
% 298.07/296.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3711(VarNext)<->v3713(VarNext)&v2343(VarNext))).
% 298.07/296.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3713(VarNext)<->v2350(VarNext))).
% 298.07/296.31  all VarCurr (v961(VarCurr,bitIndex25)<->v2339(VarCurr,bitIndex25)).
% 298.07/296.31  all VarCurr (-v3707(VarCurr)-> (v2327(VarCurr,bitIndex25)<->$F)).
% 298.07/296.31  all VarCurr (v3707(VarCurr)-> (v2327(VarCurr,bitIndex25)<->v2329(VarCurr))).
% 298.07/296.31  all VarCurr (v3707(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.07/296.31  all VarCurr (v963(VarCurr,bitIndex25)<->v2325(VarCurr,bitIndex25)).
% 298.07/296.31  all VarCurr (-v3705(VarCurr)-> (v965(VarCurr,bitIndex25)<->$F)).
% 298.07/296.31  all VarCurr (v3705(VarCurr)-> (v965(VarCurr,bitIndex25)<->v2201(VarCurr))).
% 298.07/296.31  all VarCurr (v3705(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.07/296.31  all VarCurr (v797(VarCurr,bitIndex26)<->v799(VarCurr,bitIndex26)).
% 298.07/296.31  all VarCurr (v799(VarCurr,bitIndex26)<->v2593(VarCurr,bitIndex26)).
% 298.07/296.31  all VarCurr (v801(VarCurr,bitIndex26)<->v2592(VarCurr,bitIndex26)).
% 298.07/296.31  all VarCurr (v2381(VarCurr,bitIndex26)<->v2383(VarCurr,bitIndex26)).
% 298.07/296.31  all VarCurr (v2383(VarCurr,bitIndex26)<->v2385(VarCurr,bitIndex26)).
% 298.07/296.31  all VarCurr (v2385(VarCurr,bitIndex26)<->v2387(VarCurr,bitIndex26)).
% 298.07/296.31  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3696(VarNext)-> (v2387(VarNext,bitIndex26)<->v2387(VarCurr,bitIndex26)))).
% 298.07/296.31  all VarNext (v3696(VarNext)-> (v2387(VarNext,bitIndex26)<->v3698(VarNext))).
% 298.07/296.31  all VarCurr (v3698(VarCurr)<->v2389(VarCurr)&v3699(VarCurr)).
% 298.07/296.31  all VarCurr (v3699(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1253))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1252))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1251))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1250))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1249))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1248))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1247))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1246))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1245))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1244))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1243))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1242))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1241))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1240))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1239))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1238))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1237))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1236))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1235))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1234))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1233))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1232))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1231))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1230))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1229))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1228))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1227))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1226))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1225))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1224))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1223))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1222))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1221))).
% 298.07/296.32  all VarCurr (v3696(VarCurr)<->v3697(VarCurr)&v2585(VarCurr)).
% 298.07/296.32  all VarCurr (-v3697(VarCurr)<->v2547(VarCurr)).
% 298.07/296.32  all VarNext (v803(VarNext,bitIndex26)<->v3687(VarNext,bitIndex26)).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3689(VarNext)-> (all B (range_63_0(B)-> (v3687(VarNext,B)<->v803(VarCurr,B)))))).
% 298.07/296.32  all VarNext (v3689(VarNext)-> (all B (range_63_0(B)-> (v3687(VarNext,B)<->v2377(VarNext,B))))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3689(VarNext)<->v3690(VarNext)&v2370(VarNext))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3690(VarNext)<->v3692(VarNext)&v2343(VarNext))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3692(VarNext)<->v2350(VarNext))).
% 298.07/296.32  all VarNext (v959(VarNext,bitIndex26)<->v3679(VarNext,bitIndex26)).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3680(VarNext)-> (all B (range_63_0(B)-> (v3679(VarNext,B)<->v959(VarCurr,B)))))).
% 298.07/296.32  all VarNext (v3680(VarNext)-> (all B (range_63_0(B)-> (v3679(VarNext,B)<->v2356(VarNext,B))))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3680(VarNext)<->v3681(VarNext))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3681(VarNext)<->v3683(VarNext)&v2343(VarNext))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3683(VarNext)<->v2350(VarNext))).
% 298.07/296.32  all VarCurr (v961(VarCurr,bitIndex26)<->v2339(VarCurr,bitIndex26)).
% 298.07/296.32  all VarCurr (-v3677(VarCurr)-> (v2327(VarCurr,bitIndex26)<->$F)).
% 298.07/296.32  all VarCurr (v3677(VarCurr)-> (v2327(VarCurr,bitIndex26)<->v2329(VarCurr))).
% 298.07/296.32  all VarCurr (v3677(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.07/296.32  all VarCurr (v963(VarCurr,bitIndex26)<->v2325(VarCurr,bitIndex26)).
% 298.07/296.32  all VarCurr (-v3675(VarCurr)-> (v965(VarCurr,bitIndex26)<->$F)).
% 298.07/296.32  all VarCurr (v3675(VarCurr)-> (v965(VarCurr,bitIndex26)<->v2201(VarCurr))).
% 298.07/296.32  all VarCurr (v3675(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.07/296.32  all VarCurr (v797(VarCurr,bitIndex27)<->v799(VarCurr,bitIndex27)).
% 298.07/296.32  all VarCurr (v799(VarCurr,bitIndex27)<->v2593(VarCurr,bitIndex27)).
% 298.07/296.32  all VarCurr (v801(VarCurr,bitIndex27)<->v2592(VarCurr,bitIndex27)).
% 298.07/296.32  all VarCurr (v2381(VarCurr,bitIndex27)<->v2383(VarCurr,bitIndex27)).
% 298.07/296.32  all VarCurr (v2383(VarCurr,bitIndex27)<->v2385(VarCurr,bitIndex27)).
% 298.07/296.32  all VarCurr (v2385(VarCurr,bitIndex27)<->v2387(VarCurr,bitIndex27)).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3666(VarNext)-> (v2387(VarNext,bitIndex27)<->v2387(VarCurr,bitIndex27)))).
% 298.07/296.32  all VarNext (v3666(VarNext)-> (v2387(VarNext,bitIndex27)<->v3668(VarNext))).
% 298.07/296.32  all VarCurr (v3668(VarCurr)<->v2389(VarCurr)&v3669(VarCurr)).
% 298.07/296.32  all VarCurr (v3669(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1220))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1219))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1218))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1217))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1216))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1215))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1214))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1213))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1212))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1211))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1210))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1209))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1208))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1207))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1206))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1205))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1204))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1203))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1202))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1201))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1200))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1199))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1198))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1197))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1196))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1195))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1194))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1193))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1192))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1191))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1190))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1189))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1188))).
% 298.07/296.32  all VarCurr (v3666(VarCurr)<->v3667(VarCurr)&v2585(VarCurr)).
% 298.07/296.32  all VarCurr (-v3667(VarCurr)<->v2547(VarCurr)).
% 298.07/296.32  all VarNext (v803(VarNext,bitIndex27)<->v3657(VarNext,bitIndex27)).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3659(VarNext)-> (all B (range_63_0(B)-> (v3657(VarNext,B)<->v803(VarCurr,B)))))).
% 298.07/296.32  all VarNext (v3659(VarNext)-> (all B (range_63_0(B)-> (v3657(VarNext,B)<->v2377(VarNext,B))))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3659(VarNext)<->v3660(VarNext)&v2370(VarNext))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3660(VarNext)<->v3662(VarNext)&v2343(VarNext))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3662(VarNext)<->v2350(VarNext))).
% 298.07/296.32  all VarNext (v959(VarNext,bitIndex27)<->v3649(VarNext,bitIndex27)).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3650(VarNext)-> (all B (range_63_0(B)-> (v3649(VarNext,B)<->v959(VarCurr,B)))))).
% 298.07/296.32  all VarNext (v3650(VarNext)-> (all B (range_63_0(B)-> (v3649(VarNext,B)<->v2356(VarNext,B))))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3650(VarNext)<->v3651(VarNext))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3651(VarNext)<->v3653(VarNext)&v2343(VarNext))).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3653(VarNext)<->v2350(VarNext))).
% 298.07/296.32  all VarCurr (v961(VarCurr,bitIndex27)<->v2339(VarCurr,bitIndex27)).
% 298.07/296.32  all VarCurr (-v3647(VarCurr)-> (v2327(VarCurr,bitIndex27)<->$F)).
% 298.07/296.32  all VarCurr (v3647(VarCurr)-> (v2327(VarCurr,bitIndex27)<->v2329(VarCurr))).
% 298.07/296.32  all VarCurr (v3647(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.07/296.32  all VarCurr (v963(VarCurr,bitIndex27)<->v2325(VarCurr,bitIndex27)).
% 298.07/296.32  all VarCurr (-v3645(VarCurr)-> (v965(VarCurr,bitIndex27)<->$F)).
% 298.07/296.32  all VarCurr (v3645(VarCurr)-> (v965(VarCurr,bitIndex27)<->v2201(VarCurr))).
% 298.07/296.32  all VarCurr (v3645(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.07/296.32  all VarCurr (v797(VarCurr,bitIndex28)<->v799(VarCurr,bitIndex28)).
% 298.07/296.32  all VarCurr (v799(VarCurr,bitIndex28)<->v2593(VarCurr,bitIndex28)).
% 298.07/296.32  all VarCurr (v801(VarCurr,bitIndex28)<->v2592(VarCurr,bitIndex28)).
% 298.07/296.32  all VarCurr (v2381(VarCurr,bitIndex28)<->v2383(VarCurr,bitIndex28)).
% 298.07/296.32  all VarCurr (v2383(VarCurr,bitIndex28)<->v2385(VarCurr,bitIndex28)).
% 298.07/296.32  all VarCurr (v2385(VarCurr,bitIndex28)<->v2387(VarCurr,bitIndex28)).
% 298.07/296.32  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3636(VarNext)-> (v2387(VarNext,bitIndex28)<->v2387(VarCurr,bitIndex28)))).
% 298.07/296.32  all VarNext (v3636(VarNext)-> (v2387(VarNext,bitIndex28)<->v3638(VarNext))).
% 298.07/296.32  all VarCurr (v3638(VarCurr)<->v2389(VarCurr)&v3639(VarCurr)).
% 298.07/296.32  all VarCurr (v3639(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1187))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1186))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1185))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1184))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1183))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1182))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1181))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1180))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1179))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1178))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1177))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1176))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1175))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1174))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1173))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1172))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1171))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1170))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1169))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1168))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1167))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1166))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1165))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1164))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1163))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1162))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1161))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1160))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1159))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1158))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1157))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1156))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1155))).
% 298.14/296.33  all VarCurr (v3636(VarCurr)<->v3637(VarCurr)&v2585(VarCurr)).
% 298.14/296.33  all VarCurr (-v3637(VarCurr)<->v2547(VarCurr)).
% 298.14/296.33  all VarNext (v803(VarNext,bitIndex28)<->v3627(VarNext,bitIndex28)).
% 298.14/296.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3629(VarNext)-> (all B (range_63_0(B)-> (v3627(VarNext,B)<->v803(VarCurr,B)))))).
% 298.14/296.33  all VarNext (v3629(VarNext)-> (all B (range_63_0(B)-> (v3627(VarNext,B)<->v2377(VarNext,B))))).
% 298.14/296.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3629(VarNext)<->v3630(VarNext)&v2370(VarNext))).
% 298.14/296.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3630(VarNext)<->v3632(VarNext)&v2343(VarNext))).
% 298.14/296.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3632(VarNext)<->v2350(VarNext))).
% 298.14/296.33  all VarNext (v959(VarNext,bitIndex28)<->v3619(VarNext,bitIndex28)).
% 298.14/296.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3620(VarNext)-> (all B (range_63_0(B)-> (v3619(VarNext,B)<->v959(VarCurr,B)))))).
% 298.14/296.33  all VarNext (v3620(VarNext)-> (all B (range_63_0(B)-> (v3619(VarNext,B)<->v2356(VarNext,B))))).
% 298.14/296.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3620(VarNext)<->v3621(VarNext))).
% 298.14/296.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3621(VarNext)<->v3623(VarNext)&v2343(VarNext))).
% 298.14/296.33  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3623(VarNext)<->v2350(VarNext))).
% 298.14/296.33  all VarCurr (v961(VarCurr,bitIndex28)<->v2339(VarCurr,bitIndex28)).
% 298.14/296.33  all VarCurr (-v3617(VarCurr)-> (v2327(VarCurr,bitIndex28)<->$F)).
% 298.14/296.33  all VarCurr (v3617(VarCurr)-> (v2327(VarCurr,bitIndex28)<->v2329(VarCurr))).
% 298.14/296.33  all VarCurr (v3617(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.14/296.33  all VarCurr (v963(VarCurr,bitIndex28)<->v2325(VarCurr,bitIndex28)).
% 298.14/296.33  all VarCurr (-v3615(VarCurr)-> (v965(VarCurr,bitIndex28)<->$F)).
% 298.14/296.33  all VarCurr (v3615(VarCurr)-> (v965(VarCurr,bitIndex28)<->v2201(VarCurr))).
% 298.14/296.33  all VarCurr (v3615(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.14/296.33  all VarCurr (v797(VarCurr,bitIndex29)<->v799(VarCurr,bitIndex29)).
% 298.14/296.33  all VarCurr (v799(VarCurr,bitIndex29)<->v2593(VarCurr,bitIndex29)).
% 298.14/296.33  all VarCurr (v801(VarCurr,bitIndex29)<->v2592(VarCurr,bitIndex29)).
% 298.14/296.33  all VarCurr (v2381(VarCurr,bitIndex29)<->v2383(VarCurr,bitIndex29)).
% 298.14/296.33  all VarCurr (v2383(VarCurr,bitIndex29)<->v2385(VarCurr,bitIndex29)).
% 298.14/296.34  all VarCurr (v2385(VarCurr,bitIndex29)<->v2387(VarCurr,bitIndex29)).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3606(VarNext)-> (v2387(VarNext,bitIndex29)<->v2387(VarCurr,bitIndex29)))).
% 298.14/296.34  all VarNext (v3606(VarNext)-> (v2387(VarNext,bitIndex29)<->v3608(VarNext))).
% 298.14/296.34  all VarCurr (v3608(VarCurr)<->v2389(VarCurr)&v3609(VarCurr)).
% 298.14/296.34  all VarCurr (v3609(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1154))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1153))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1152))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1151))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1150))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1149))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1148))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1147))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1146))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1145))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1144))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1143))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1142))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1141))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1140))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1139))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1138))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1137))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1136))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1135))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1134))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1133))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1132))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1131))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1130))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1129))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1128))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1127))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1126))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1125))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1124))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1123))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1122))).
% 298.14/296.34  all VarCurr (v3606(VarCurr)<->v3607(VarCurr)&v2585(VarCurr)).
% 298.14/296.34  all VarCurr (-v3607(VarCurr)<->v2547(VarCurr)).
% 298.14/296.34  all VarNext (v803(VarNext,bitIndex29)<->v3597(VarNext,bitIndex29)).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3599(VarNext)-> (all B (range_63_0(B)-> (v3597(VarNext,B)<->v803(VarCurr,B)))))).
% 298.14/296.34  all VarNext (v3599(VarNext)-> (all B (range_63_0(B)-> (v3597(VarNext,B)<->v2377(VarNext,B))))).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3599(VarNext)<->v3600(VarNext)&v2370(VarNext))).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3600(VarNext)<->v3602(VarNext)&v2343(VarNext))).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3602(VarNext)<->v2350(VarNext))).
% 298.14/296.34  all VarNext (v959(VarNext,bitIndex29)<->v3589(VarNext,bitIndex29)).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3590(VarNext)-> (all B (range_63_0(B)-> (v3589(VarNext,B)<->v959(VarCurr,B)))))).
% 298.14/296.34  all VarNext (v3590(VarNext)-> (all B (range_63_0(B)-> (v3589(VarNext,B)<->v2356(VarNext,B))))).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3590(VarNext)<->v3591(VarNext))).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3591(VarNext)<->v3593(VarNext)&v2343(VarNext))).
% 298.14/296.34  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3593(VarNext)<->v2350(VarNext))).
% 298.14/296.34  all VarCurr (v961(VarCurr,bitIndex29)<->v2339(VarCurr,bitIndex29)).
% 298.14/296.34  all VarCurr (-v3587(VarCurr)-> (v2327(VarCurr,bitIndex29)<->$F)).
% 298.14/296.34  all VarCurr (v3587(VarCurr)-> (v2327(VarCurr,bitIndex29)<->v2329(VarCurr))).
% 298.14/296.34  all VarCurr (v3587(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.14/296.34  all VarCurr (v963(VarCurr,bitIndex29)<->v2325(VarCurr,bitIndex29)).
% 298.14/296.35  all VarCurr (-v3585(VarCurr)-> (v965(VarCurr,bitIndex29)<->$F)).
% 298.14/296.35  all VarCurr (v3585(VarCurr)-> (v965(VarCurr,bitIndex29)<->v2201(VarCurr))).
% 298.14/296.35  all VarCurr (v3585(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.14/296.35  all VarCurr (v797(VarCurr,bitIndex30)<->v799(VarCurr,bitIndex30)).
% 298.14/296.35  all VarCurr (v799(VarCurr,bitIndex30)<->v2593(VarCurr,bitIndex30)).
% 298.14/296.35  all VarCurr (v801(VarCurr,bitIndex30)<->v2592(VarCurr,bitIndex30)).
% 298.14/296.35  all VarCurr (v2381(VarCurr,bitIndex30)<->v2383(VarCurr,bitIndex30)).
% 298.14/296.35  all VarCurr (v2383(VarCurr,bitIndex30)<->v2385(VarCurr,bitIndex30)).
% 298.14/296.35  all VarCurr (v2385(VarCurr,bitIndex30)<->v2387(VarCurr,bitIndex30)).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3576(VarNext)-> (v2387(VarNext,bitIndex30)<->v2387(VarCurr,bitIndex30)))).
% 298.14/296.35  all VarNext (v3576(VarNext)-> (v2387(VarNext,bitIndex30)<->v3578(VarNext))).
% 298.14/296.35  all VarCurr (v3578(VarCurr)<->v2389(VarCurr)&v3579(VarCurr)).
% 298.14/296.35  all VarCurr (v3579(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1121))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1120))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1119))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1118))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1117))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1116))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1115))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1114))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1113))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1112))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1111))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1110))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1109))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1108))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1107))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1106))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1105))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1104))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1103))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1102))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1101))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1100))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1099))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1098))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1097))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1096))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1095))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1094))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1093))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1092))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1091))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1090))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1089))).
% 298.14/296.35  all VarCurr (v3576(VarCurr)<->v3577(VarCurr)&v2585(VarCurr)).
% 298.14/296.35  all VarCurr (-v3577(VarCurr)<->v2547(VarCurr)).
% 298.14/296.35  all VarNext (v803(VarNext,bitIndex30)<->v3567(VarNext,bitIndex30)).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3569(VarNext)-> (all B (range_63_0(B)-> (v3567(VarNext,B)<->v803(VarCurr,B)))))).
% 298.14/296.35  all VarNext (v3569(VarNext)-> (all B (range_63_0(B)-> (v3567(VarNext,B)<->v2377(VarNext,B))))).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3569(VarNext)<->v3570(VarNext)&v2370(VarNext))).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3570(VarNext)<->v3572(VarNext)&v2343(VarNext))).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3572(VarNext)<->v2350(VarNext))).
% 298.14/296.35  all VarNext (v959(VarNext,bitIndex30)<->v3559(VarNext,bitIndex30)).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3560(VarNext)-> (all B (range_63_0(B)-> (v3559(VarNext,B)<->v959(VarCurr,B)))))).
% 298.14/296.35  all VarNext (v3560(VarNext)-> (all B (range_63_0(B)-> (v3559(VarNext,B)<->v2356(VarNext,B))))).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3560(VarNext)<->v3561(VarNext))).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3561(VarNext)<->v3563(VarNext)&v2343(VarNext))).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3563(VarNext)<->v2350(VarNext))).
% 298.14/296.35  all VarCurr (v961(VarCurr,bitIndex30)<->v2339(VarCurr,bitIndex30)).
% 298.14/296.35  all VarCurr (-v3557(VarCurr)-> (v2327(VarCurr,bitIndex30)<->$F)).
% 298.14/296.35  all VarCurr (v3557(VarCurr)-> (v2327(VarCurr,bitIndex30)<->v2329(VarCurr))).
% 298.14/296.35  all VarCurr (v3557(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.14/296.35  all VarCurr (v963(VarCurr,bitIndex30)<->v2325(VarCurr,bitIndex30)).
% 298.14/296.35  all VarCurr (-v3555(VarCurr)-> (v965(VarCurr,bitIndex30)<->$F)).
% 298.14/296.35  all VarCurr (v3555(VarCurr)-> (v965(VarCurr,bitIndex30)<->v2201(VarCurr))).
% 298.14/296.35  all VarCurr (v3555(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.14/296.35  all VarCurr (v797(VarCurr,bitIndex31)<->v799(VarCurr,bitIndex31)).
% 298.14/296.35  all VarCurr (v799(VarCurr,bitIndex31)<->v2593(VarCurr,bitIndex31)).
% 298.14/296.35  all VarCurr (v801(VarCurr,bitIndex31)<->v2592(VarCurr,bitIndex31)).
% 298.14/296.35  all VarCurr (v2381(VarCurr,bitIndex31)<->v2383(VarCurr,bitIndex31)).
% 298.14/296.35  all VarCurr (v2383(VarCurr,bitIndex31)<->v2385(VarCurr,bitIndex31)).
% 298.14/296.35  all VarCurr (v2385(VarCurr,bitIndex31)<->v2387(VarCurr,bitIndex31)).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3546(VarNext)-> (v2387(VarNext,bitIndex31)<->v2387(VarCurr,bitIndex31)))).
% 298.14/296.35  all VarNext (v3546(VarNext)-> (v2387(VarNext,bitIndex31)<->v3548(VarNext))).
% 298.14/296.35  all VarCurr (v3548(VarCurr)<->v2389(VarCurr)&v3549(VarCurr)).
% 298.14/296.35  all VarCurr (v3549(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1088))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1087))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1086))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1085))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1084))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1083))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1082))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1081))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1080))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1079))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1078))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1077))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1076))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1075))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1074))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1073))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1072))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1071))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1070))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1069))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1068))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1067))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1066))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1065))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1064))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1063))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1062))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1061))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1060))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1059))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1058))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1057))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1056))).
% 298.14/296.35  all VarCurr (v3546(VarCurr)<->v3547(VarCurr)&v2585(VarCurr)).
% 298.14/296.35  all VarCurr (-v3547(VarCurr)<->v2547(VarCurr)).
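%----v3549 compares the 33-bit word v2409(bitIndex32..0) against the slice
%----v2545(bitIndex1088..1056), and v2387(bitIndex31) loads v2389&v3549
%----whenever the enable -v2547 & v2585 fires. Each later target bit k gets
%----the same comparator with its window shifted down by 33 positions:
%----high = 1088 - 33*(k-31), i.e. low = 33*(63-k). A sketch of that closed
%----form (inferred from the instances printed in this run, not Otter output):
%
%      def window(k):
%          """(high, low) inclusive bit range of v2545 compared against
%          v2409[32:0] for target bit k of v2387; inferred closed form."""
%          low = 33 * (63 - k)
%          return low + 32, low
%
%      assert window(31) == (1088, 1056)   # v3549 above
%      assert window(32) == (1055, 1023)   # v3519 below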
% 298.14/296.35  all VarNext (v803(VarNext,bitIndex31)<->v3537(VarNext,bitIndex31)).
% 298.14/296.35  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3539(VarNext)-> (all B (range_63_0(B)-> (v3537(VarNext,B)<->v803(VarCurr,B)))))).
% 298.14/296.35  all VarNext (v3539(VarNext)-> (all B (range_63_0(B)-> (v3537(VarNext,B)<->v2377(VarNext,B))))).
% 298.14/296.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3539(VarNext)<->v3540(VarNext)&v2370(VarNext))).
% 298.14/296.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3540(VarNext)<->v3542(VarNext)&v2343(VarNext))).
% 298.14/296.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3542(VarNext)<->v2350(VarNext))).
% 298.14/296.36  all VarNext (v959(VarNext,bitIndex31)<->v3529(VarNext,bitIndex31)).
% 298.14/296.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3530(VarNext)-> (all B (range_63_0(B)-> (v3529(VarNext,B)<->v959(VarCurr,B)))))).
% 298.14/296.36  all VarNext (v3530(VarNext)-> (all B (range_63_0(B)-> (v3529(VarNext,B)<->v2356(VarNext,B))))).
% 298.14/296.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3530(VarNext)<->v3531(VarNext))).
% 298.14/296.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3531(VarNext)<->v3533(VarNext)&v2343(VarNext))).
% 298.14/296.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3533(VarNext)<->v2350(VarNext))).
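%----The two blocks above are the clocked-register template used throughout
%----this section: across each nextState step the 64-bit vectors v803 and v959
%----(via the shadow copies v3537 and v3529, quantified over range_63_0) hold
%----their value unless an enable fires, in which case they load v2377 resp.
%----v2356. Unfolding the enable chains gives v3539 = (-v2350) & v2343 & v2370
%----and v3530 = (-v2350) & v2343, with no v2370 conjunct in the v959 copy.
%----One step of each, as a Python sketch (hypothetical model of the axioms):
%
%      def step_v803(v803_curr, v2377_next, v2350, v2343, v2370):
%          enable = (not v2350) and v2343 and v2370   # v3539 unfolded
%          return list(v2377_next) if enable else list(v803_curr)
%
%      def step_v959(v959_curr, v2356_next, v2350, v2343):
%          enable = (not v2350) and v2343             # v3530 unfolded
%          return list(v2356_next) if enable else list(v959_curr)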
% 298.14/296.36  all VarCurr (v961(VarCurr,bitIndex31)<->v2339(VarCurr,bitIndex31)).
% 298.14/296.36  all VarCurr (-v3527(VarCurr)-> (v2327(VarCurr,bitIndex31)<->$F)).
% 298.14/296.36  all VarCurr (v3527(VarCurr)-> (v2327(VarCurr,bitIndex31)<->v2329(VarCurr))).
% 298.14/296.36  all VarCurr (v3527(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.14/296.36  all VarCurr (v963(VarCurr,bitIndex31)<->v2325(VarCurr,bitIndex31)).
% 298.14/296.36  all VarCurr (-v3525(VarCurr)-> (v965(VarCurr,bitIndex31)<->$F)).
% 298.14/296.36  all VarCurr (v3525(VarCurr)-> (v965(VarCurr,bitIndex31)<->v2201(VarCurr))).
% 298.14/296.36  all VarCurr (v3525(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$F)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
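%----As the template repeats, the selector constants step through consecutive
%----integers: 30 = 011110 (v3555/v3557 above), 31 = 011111 (v3525/v3527),
%----32 = 100000 (v3495/v3497 below), and so on up to 42 = 101010
%----(v3195/v3197), always reading v967 as (bit5..bit0). A one-liner confirms
%----the encodings appearing in this section:
%
%      for k in range(30, 43):
%          print(k, format(k, "06b"))   # 30 -> 011110 ... 42 -> 101010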
% 298.14/296.36  all VarCurr (v797(VarCurr,bitIndex32)<->v799(VarCurr,bitIndex32)).
% 298.14/296.36  all VarCurr (v799(VarCurr,bitIndex32)<->v2593(VarCurr,bitIndex32)).
% 298.14/296.36  all VarCurr (v801(VarCurr,bitIndex32)<->v2592(VarCurr,bitIndex32)).
% 298.14/296.36  all VarCurr (v2381(VarCurr,bitIndex32)<->v2383(VarCurr,bitIndex32)).
% 298.14/296.36  all VarCurr (v2383(VarCurr,bitIndex32)<->v2385(VarCurr,bitIndex32)).
% 298.14/296.36  all VarCurr (v2385(VarCurr,bitIndex32)<->v2387(VarCurr,bitIndex32)).
% 298.14/296.36  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3516(VarNext)-> (v2387(VarNext,bitIndex32)<->v2387(VarCurr,bitIndex32)))).
% 298.14/296.36  all VarNext (v3516(VarNext)-> (v2387(VarNext,bitIndex32)<->v3518(VarNext))).
% 298.14/296.36  all VarCurr (v3518(VarCurr)<->v2389(VarCurr)&v3519(VarCurr)).
% 298.14/296.36  all VarCurr (v3519(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1055))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1054))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1053))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1052))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1051))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1050))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1049))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1048))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1047))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1046))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1045))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1044))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1043))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1042))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1041))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1040))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1039))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1038))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1037))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1036))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1035))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1034))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1033))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex1032))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex1031))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex1030))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex1029))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex1028))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex1027))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex1026))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex1025))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1024))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex1023))).
% 298.17/296.37  all VarCurr (v3516(VarCurr)<->v3517(VarCurr)&v2585(VarCurr)).
% 298.17/296.37  all VarCurr (-v3517(VarCurr)<->v2547(VarCurr)).
% 298.17/296.37  all VarNext (v803(VarNext,bitIndex32)<->v3507(VarNext,bitIndex32)).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3509(VarNext)-> (all B (range_63_0(B)-> (v3507(VarNext,B)<->v803(VarCurr,B)))))).
% 298.17/296.37  all VarNext (v3509(VarNext)-> (all B (range_63_0(B)-> (v3507(VarNext,B)<->v2377(VarNext,B))))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3509(VarNext)<->v3510(VarNext)&v2370(VarNext))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3510(VarNext)<->v3512(VarNext)&v2343(VarNext))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3512(VarNext)<->v2350(VarNext))).
% 298.17/296.37  all VarNext (v959(VarNext,bitIndex32)<->v3499(VarNext,bitIndex32)).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3500(VarNext)-> (all B (range_63_0(B)-> (v3499(VarNext,B)<->v959(VarCurr,B)))))).
% 298.17/296.37  all VarNext (v3500(VarNext)-> (all B (range_63_0(B)-> (v3499(VarNext,B)<->v2356(VarNext,B))))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3500(VarNext)<->v3501(VarNext))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3501(VarNext)<->v3503(VarNext)&v2343(VarNext))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3503(VarNext)<->v2350(VarNext))).
% 298.17/296.37  all VarCurr (v961(VarCurr,bitIndex32)<->v2339(VarCurr,bitIndex32)).
% 298.17/296.37  all VarCurr (-v3497(VarCurr)-> (v2327(VarCurr,bitIndex32)<->$F)).
% 298.17/296.37  all VarCurr (v3497(VarCurr)-> (v2327(VarCurr,bitIndex32)<->v2329(VarCurr))).
% 298.17/296.37  all VarCurr (v3497(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.17/296.37  all VarCurr (v963(VarCurr,bitIndex32)<->v2325(VarCurr,bitIndex32)).
% 298.17/296.37  all VarCurr (-v3495(VarCurr)-> (v965(VarCurr,bitIndex32)<->$F)).
% 298.17/296.37  all VarCurr (v3495(VarCurr)-> (v965(VarCurr,bitIndex32)<->v2201(VarCurr))).
% 298.17/296.37  all VarCurr (v3495(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.17/296.37  all VarCurr (v797(VarCurr,bitIndex33)<->v799(VarCurr,bitIndex33)).
% 298.17/296.37  all VarCurr (v799(VarCurr,bitIndex33)<->v2593(VarCurr,bitIndex33)).
% 298.17/296.37  all VarCurr (v801(VarCurr,bitIndex33)<->v2592(VarCurr,bitIndex33)).
% 298.17/296.37  all VarCurr (v2381(VarCurr,bitIndex33)<->v2383(VarCurr,bitIndex33)).
% 298.17/296.37  all VarCurr (v2383(VarCurr,bitIndex33)<->v2385(VarCurr,bitIndex33)).
% 298.17/296.37  all VarCurr (v2385(VarCurr,bitIndex33)<->v2387(VarCurr,bitIndex33)).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3486(VarNext)-> (v2387(VarNext,bitIndex33)<->v2387(VarCurr,bitIndex33)))).
% 298.17/296.37  all VarNext (v3486(VarNext)-> (v2387(VarNext,bitIndex33)<->v3488(VarNext))).
% 298.17/296.37  all VarCurr (v3488(VarCurr)<->v2389(VarCurr)&v3489(VarCurr)).
% 298.17/296.37  all VarCurr (v3489(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex1022))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex1021))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex1020))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex1019))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex1018))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex1017))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex1016))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex1015))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex1014))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex1013))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex1012))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex1011))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex1010))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex1009))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex1008))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex1007))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex1006))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex1005))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex1004))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex1003))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex1002))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex1001))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex1000))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex999))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex998))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex997))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex996))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex995))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex994))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex993))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex992))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex991))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex990))).
% 298.17/296.37  all VarCurr (v3486(VarCurr)<->v3487(VarCurr)&v2585(VarCurr)).
% 298.17/296.37  all VarCurr (-v3487(VarCurr)<->v2547(VarCurr)).
% 298.17/296.37  all VarNext (v803(VarNext,bitIndex33)<->v3477(VarNext,bitIndex33)).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3479(VarNext)-> (all B (range_63_0(B)-> (v3477(VarNext,B)<->v803(VarCurr,B)))))).
% 298.17/296.37  all VarNext (v3479(VarNext)-> (all B (range_63_0(B)-> (v3477(VarNext,B)<->v2377(VarNext,B))))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3479(VarNext)<->v3480(VarNext)&v2370(VarNext))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3480(VarNext)<->v3482(VarNext)&v2343(VarNext))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3482(VarNext)<->v2350(VarNext))).
% 298.17/296.37  all VarNext (v959(VarNext,bitIndex33)<->v3469(VarNext,bitIndex33)).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3470(VarNext)-> (all B (range_63_0(B)-> (v3469(VarNext,B)<->v959(VarCurr,B)))))).
% 298.17/296.37  all VarNext (v3470(VarNext)-> (all B (range_63_0(B)-> (v3469(VarNext,B)<->v2356(VarNext,B))))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3470(VarNext)<->v3471(VarNext))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3471(VarNext)<->v3473(VarNext)&v2343(VarNext))).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3473(VarNext)<->v2350(VarNext))).
% 298.17/296.37  all VarCurr (v961(VarCurr,bitIndex33)<->v2339(VarCurr,bitIndex33)).
% 298.17/296.37  all VarCurr (-v3467(VarCurr)-> (v2327(VarCurr,bitIndex33)<->$F)).
% 298.17/296.37  all VarCurr (v3467(VarCurr)-> (v2327(VarCurr,bitIndex33)<->v2329(VarCurr))).
% 298.17/296.37  all VarCurr (v3467(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.17/296.37  all VarCurr (v963(VarCurr,bitIndex33)<->v2325(VarCurr,bitIndex33)).
% 298.17/296.37  all VarCurr (-v3465(VarCurr)-> (v965(VarCurr,bitIndex33)<->$F)).
% 298.17/296.37  all VarCurr (v3465(VarCurr)-> (v965(VarCurr,bitIndex33)<->v2201(VarCurr))).
% 298.17/296.37  all VarCurr (v3465(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.17/296.37  all VarCurr (v797(VarCurr,bitIndex34)<->v799(VarCurr,bitIndex34)).
% 298.17/296.37  all VarCurr (v799(VarCurr,bitIndex34)<->v2593(VarCurr,bitIndex34)).
% 298.17/296.37  all VarCurr (v801(VarCurr,bitIndex34)<->v2592(VarCurr,bitIndex34)).
% 298.17/296.37  all VarCurr (v2381(VarCurr,bitIndex34)<->v2383(VarCurr,bitIndex34)).
% 298.17/296.37  all VarCurr (v2383(VarCurr,bitIndex34)<->v2385(VarCurr,bitIndex34)).
% 298.17/296.37  all VarCurr (v2385(VarCurr,bitIndex34)<->v2387(VarCurr,bitIndex34)).
% 298.17/296.37  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3456(VarNext)-> (v2387(VarNext,bitIndex34)<->v2387(VarCurr,bitIndex34)))).
% 298.17/296.37  all VarNext (v3456(VarNext)-> (v2387(VarNext,bitIndex34)<->v3458(VarNext))).
% 298.17/296.37  all VarCurr (v3458(VarCurr)<->v2389(VarCurr)&v3459(VarCurr)).
% 298.17/296.37  all VarCurr (v3459(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex989))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex988))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex987))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex986))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex985))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex984))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex983))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex982))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex981))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex980))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex979))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex978))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex977))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex976))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex975))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex974))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex973))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex972))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex971))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex970))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex969))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex968))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex967))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex966))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex965))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex964))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex963))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex962))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex961))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex960))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex959))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex958))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex957))).
% 298.17/296.38  all VarCurr (v3456(VarCurr)<->v3457(VarCurr)&v2585(VarCurr)).
% 298.17/296.38  all VarCurr (-v3457(VarCurr)<->v2547(VarCurr)).
% 298.17/296.38  all VarNext (v803(VarNext,bitIndex34)<->v3447(VarNext,bitIndex34)).
% 298.17/296.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3449(VarNext)-> (all B (range_63_0(B)-> (v3447(VarNext,B)<->v803(VarCurr,B)))))).
% 298.17/296.38  all VarNext (v3449(VarNext)-> (all B (range_63_0(B)-> (v3447(VarNext,B)<->v2377(VarNext,B))))).
% 298.17/296.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3449(VarNext)<->v3450(VarNext)&v2370(VarNext))).
% 298.17/296.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3450(VarNext)<->v3452(VarNext)&v2343(VarNext))).
% 298.17/296.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3452(VarNext)<->v2350(VarNext))).
% 298.17/296.38  all VarNext (v959(VarNext,bitIndex34)<->v3439(VarNext,bitIndex34)).
% 298.17/296.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3440(VarNext)-> (all B (range_63_0(B)-> (v3439(VarNext,B)<->v959(VarCurr,B)))))).
% 298.17/296.38  all VarNext (v3440(VarNext)-> (all B (range_63_0(B)-> (v3439(VarNext,B)<->v2356(VarNext,B))))).
% 298.17/296.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3440(VarNext)<->v3441(VarNext))).
% 298.17/296.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3441(VarNext)<->v3443(VarNext)&v2343(VarNext))).
% 298.17/296.38  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3443(VarNext)<->v2350(VarNext))).
% 298.17/296.38  all VarCurr (v961(VarCurr,bitIndex34)<->v2339(VarCurr,bitIndex34)).
% 298.17/296.38  all VarCurr (-v3437(VarCurr)-> (v2327(VarCurr,bitIndex34)<->$F)).
% 298.17/296.38  all VarCurr (v3437(VarCurr)-> (v2327(VarCurr,bitIndex34)<->v2329(VarCurr))).
% 298.17/296.38  all VarCurr (v3437(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.17/296.38  all VarCurr (v963(VarCurr,bitIndex34)<->v2325(VarCurr,bitIndex34)).
% 298.17/296.38  all VarCurr (-v3435(VarCurr)-> (v965(VarCurr,bitIndex34)<->$F)).
% 298.17/296.38  all VarCurr (v3435(VarCurr)-> (v965(VarCurr,bitIndex34)<->v2201(VarCurr))).
% 298.17/296.38  all VarCurr (v3435(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.17/296.38  all VarCurr (v797(VarCurr,bitIndex35)<->v799(VarCurr,bitIndex35)).
% 298.17/296.38  all VarCurr (v799(VarCurr,bitIndex35)<->v2593(VarCurr,bitIndex35)).
% 298.17/296.38  all VarCurr (v801(VarCurr,bitIndex35)<->v2592(VarCurr,bitIndex35)).
% 298.17/296.38  all VarCurr (v2381(VarCurr,bitIndex35)<->v2383(VarCurr,bitIndex35)).
% 298.17/296.38  all VarCurr (v2383(VarCurr,bitIndex35)<->v2385(VarCurr,bitIndex35)).
% 298.17/296.39  all VarCurr (v2385(VarCurr,bitIndex35)<->v2387(VarCurr,bitIndex35)).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3426(VarNext)-> (v2387(VarNext,bitIndex35)<->v2387(VarCurr,bitIndex35)))).
% 298.17/296.39  all VarNext (v3426(VarNext)-> (v2387(VarNext,bitIndex35)<->v3428(VarNext))).
% 298.17/296.39  all VarCurr (v3428(VarCurr)<->v2389(VarCurr)&v3429(VarCurr)).
% 298.17/296.39  all VarCurr (v3429(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex956))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex955))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex954))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex953))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex952))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex951))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex950))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex949))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex948))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex947))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex946))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex945))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex944))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex943))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex942))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex941))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex940))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex939))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex938))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex937))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex936))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex935))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex934))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex933))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex932))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex931))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex930))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex929))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex928))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex927))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex926))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex925))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex924))).
% 298.17/296.39  all VarCurr (v3426(VarCurr)<->v3427(VarCurr)&v2585(VarCurr)).
% 298.17/296.39  all VarCurr (-v3427(VarCurr)<->v2547(VarCurr)).
% 298.17/296.39  all VarNext (v803(VarNext,bitIndex35)<->v3417(VarNext,bitIndex35)).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3419(VarNext)-> (all B (range_63_0(B)-> (v3417(VarNext,B)<->v803(VarCurr,B)))))).
% 298.17/296.39  all VarNext (v3419(VarNext)-> (all B (range_63_0(B)-> (v3417(VarNext,B)<->v2377(VarNext,B))))).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3419(VarNext)<->v3420(VarNext)&v2370(VarNext))).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3420(VarNext)<->v3422(VarNext)&v2343(VarNext))).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3422(VarNext)<->v2350(VarNext))).
% 298.17/296.39  all VarNext (v959(VarNext,bitIndex35)<->v3409(VarNext,bitIndex35)).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3410(VarNext)-> (all B (range_63_0(B)-> (v3409(VarNext,B)<->v959(VarCurr,B)))))).
% 298.17/296.39  all VarNext (v3410(VarNext)-> (all B (range_63_0(B)-> (v3409(VarNext,B)<->v2356(VarNext,B))))).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3410(VarNext)<->v3411(VarNext))).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3411(VarNext)<->v3413(VarNext)&v2343(VarNext))).
% 298.17/296.39  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3413(VarNext)<->v2350(VarNext))).
% 298.17/296.39  all VarCurr (v961(VarCurr,bitIndex35)<->v2339(VarCurr,bitIndex35)).
% 298.17/296.39  all VarCurr (-v3407(VarCurr)-> (v2327(VarCurr,bitIndex35)<->$F)).
% 298.17/296.39  all VarCurr (v3407(VarCurr)-> (v2327(VarCurr,bitIndex35)<->v2329(VarCurr))).
% 298.17/296.39  all VarCurr (v3407(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.17/296.39  all VarCurr (v963(VarCurr,bitIndex35)<->v2325(VarCurr,bitIndex35)).
% 298.17/296.39  all VarCurr (-v3405(VarCurr)-> (v965(VarCurr,bitIndex35)<->$F)).
% 298.17/296.40  all VarCurr (v3405(VarCurr)-> (v965(VarCurr,bitIndex35)<->v2201(VarCurr))).
% 298.17/296.40  all VarCurr (v3405(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.17/296.40  all VarCurr (v797(VarCurr,bitIndex36)<->v799(VarCurr,bitIndex36)).
% 298.17/296.40  all VarCurr (v799(VarCurr,bitIndex36)<->v2593(VarCurr,bitIndex36)).
% 298.17/296.40  all VarCurr (v801(VarCurr,bitIndex36)<->v2592(VarCurr,bitIndex36)).
% 298.17/296.40  all VarCurr (v2381(VarCurr,bitIndex36)<->v2383(VarCurr,bitIndex36)).
% 298.17/296.40  all VarCurr (v2383(VarCurr,bitIndex36)<->v2385(VarCurr,bitIndex36)).
% 298.17/296.40  all VarCurr (v2385(VarCurr,bitIndex36)<->v2387(VarCurr,bitIndex36)).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3396(VarNext)-> (v2387(VarNext,bitIndex36)<->v2387(VarCurr,bitIndex36)))).
% 298.17/296.40  all VarNext (v3396(VarNext)-> (v2387(VarNext,bitIndex36)<->v3398(VarNext))).
% 298.17/296.40  all VarCurr (v3398(VarCurr)<->v2389(VarCurr)&v3399(VarCurr)).
% 298.17/296.40  all VarCurr (v3399(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex923))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex922))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex921))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex920))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex919))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex918))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex917))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex916))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex915))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex914))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex913))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex912))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex911))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex910))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex909))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex908))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex907))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex906))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex905))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex904))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex903))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex902))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex901))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex900))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex899))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex898))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex897))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex896))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex895))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex894))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex893))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex892))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex891))).
% 298.17/296.40  all VarCurr (v3396(VarCurr)<->v3397(VarCurr)&v2585(VarCurr)).
% 298.17/296.40  all VarCurr (-v3397(VarCurr)<->v2547(VarCurr)).
% 298.17/296.40  all VarNext (v803(VarNext,bitIndex36)<->v3387(VarNext,bitIndex36)).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3389(VarNext)-> (all B (range_63_0(B)-> (v3387(VarNext,B)<->v803(VarCurr,B)))))).
% 298.17/296.40  all VarNext (v3389(VarNext)-> (all B (range_63_0(B)-> (v3387(VarNext,B)<->v2377(VarNext,B))))).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3389(VarNext)<->v3390(VarNext)&v2370(VarNext))).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3390(VarNext)<->v3392(VarNext)&v2343(VarNext))).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3392(VarNext)<->v2350(VarNext))).
% 298.17/296.40  all VarNext (v959(VarNext,bitIndex36)<->v3379(VarNext,bitIndex36)).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3380(VarNext)-> (all B (range_63_0(B)-> (v3379(VarNext,B)<->v959(VarCurr,B)))))).
% 298.17/296.40  all VarNext (v3380(VarNext)-> (all B (range_63_0(B)-> (v3379(VarNext,B)<->v2356(VarNext,B))))).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3380(VarNext)<->v3381(VarNext))).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3381(VarNext)<->v3383(VarNext)&v2343(VarNext))).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3383(VarNext)<->v2350(VarNext))).
% 298.17/296.40  all VarCurr (v961(VarCurr,bitIndex36)<->v2339(VarCurr,bitIndex36)).
% 298.17/296.40  all VarCurr (-v3377(VarCurr)-> (v2327(VarCurr,bitIndex36)<->$F)).
% 298.17/296.40  all VarCurr (v3377(VarCurr)-> (v2327(VarCurr,bitIndex36)<->v2329(VarCurr))).
% 298.17/296.40  all VarCurr (v3377(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.17/296.40  all VarCurr (v963(VarCurr,bitIndex36)<->v2325(VarCurr,bitIndex36)).
% 298.17/296.40  all VarCurr (-v3375(VarCurr)-> (v965(VarCurr,bitIndex36)<->$F)).
% 298.17/296.40  all VarCurr (v3375(VarCurr)-> (v965(VarCurr,bitIndex36)<->v2201(VarCurr))).
% 298.17/296.40  all VarCurr (v3375(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.17/296.40  all VarCurr (v797(VarCurr,bitIndex37)<->v799(VarCurr,bitIndex37)).
% 298.17/296.40  all VarCurr (v799(VarCurr,bitIndex37)<->v2593(VarCurr,bitIndex37)).
% 298.17/296.40  all VarCurr (v801(VarCurr,bitIndex37)<->v2592(VarCurr,bitIndex37)).
% 298.17/296.40  all VarCurr (v2381(VarCurr,bitIndex37)<->v2383(VarCurr,bitIndex37)).
% 298.17/296.40  all VarCurr (v2383(VarCurr,bitIndex37)<->v2385(VarCurr,bitIndex37)).
% 298.17/296.40  all VarCurr (v2385(VarCurr,bitIndex37)<->v2387(VarCurr,bitIndex37)).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3366(VarNext)-> (v2387(VarNext,bitIndex37)<->v2387(VarCurr,bitIndex37)))).
% 298.17/296.40  all VarNext (v3366(VarNext)-> (v2387(VarNext,bitIndex37)<->v3368(VarNext))).
% 298.17/296.40  all VarCurr (v3368(VarCurr)<->v2389(VarCurr)&v3369(VarCurr)).
% 298.17/296.40  all VarCurr (v3369(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex890))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex889))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex888))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex887))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex886))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex885))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex884))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex883))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex882))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex881))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex880))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex879))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex878))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex877))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex876))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex875))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex874))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex873))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex872))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex871))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex870))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex869))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex868))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex867))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex866))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex865))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex864))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex863))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex862))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex861))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex860))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex859))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex858))).
% 298.17/296.40  all VarCurr (v3366(VarCurr)<->v3367(VarCurr)&v2585(VarCurr)).
% 298.17/296.40  all VarCurr (-v3367(VarCurr)<->v2547(VarCurr)).
% 298.17/296.40  all VarNext (v803(VarNext,bitIndex37)<->v3357(VarNext,bitIndex37)).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3359(VarNext)-> (all B (range_63_0(B)-> (v3357(VarNext,B)<->v803(VarCurr,B)))))).
% 298.17/296.40  all VarNext (v3359(VarNext)-> (all B (range_63_0(B)-> (v3357(VarNext,B)<->v2377(VarNext,B))))).
% 298.17/296.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3359(VarNext)<->v3360(VarNext)&v2370(VarNext))).
% 298.17/296.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3360(VarNext)<->v3362(VarNext)&v2343(VarNext))).
% 298.17/296.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3362(VarNext)<->v2350(VarNext))).
% 298.17/296.41  all VarNext (v959(VarNext,bitIndex37)<->v3349(VarNext,bitIndex37)).
% 298.17/296.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3350(VarNext)-> (all B (range_63_0(B)-> (v3349(VarNext,B)<->v959(VarCurr,B)))))).
% 298.17/296.41  all VarNext (v3350(VarNext)-> (all B (range_63_0(B)-> (v3349(VarNext,B)<->v2356(VarNext,B))))).
% 298.17/296.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3350(VarNext)<->v3351(VarNext))).
% 298.17/296.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3351(VarNext)<->v3353(VarNext)&v2343(VarNext))).
% 298.17/296.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3353(VarNext)<->v2350(VarNext))).
% 298.17/296.41  all VarCurr (v961(VarCurr,bitIndex37)<->v2339(VarCurr,bitIndex37)).
% 298.17/296.41  all VarCurr (-v3347(VarCurr)-> (v2327(VarCurr,bitIndex37)<->$F)).
% 298.17/296.41  all VarCurr (v3347(VarCurr)-> (v2327(VarCurr,bitIndex37)<->v2329(VarCurr))).
% 298.17/296.41  all VarCurr (v3347(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.17/296.41  all VarCurr (v963(VarCurr,bitIndex37)<->v2325(VarCurr,bitIndex37)).
% 298.17/296.41  all VarCurr (-v3345(VarCurr)-> (v965(VarCurr,bitIndex37)<->$F)).
% 298.17/296.41  all VarCurr (v3345(VarCurr)-> (v965(VarCurr,bitIndex37)<->v2201(VarCurr))).
% 298.17/296.41  all VarCurr (v3345(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.17/296.41  all VarCurr (v797(VarCurr,bitIndex38)<->v799(VarCurr,bitIndex38)).
% 298.17/296.41  all VarCurr (v799(VarCurr,bitIndex38)<->v2593(VarCurr,bitIndex38)).
% 298.17/296.41  all VarCurr (v801(VarCurr,bitIndex38)<->v2592(VarCurr,bitIndex38)).
% 298.17/296.41  all VarCurr (v2381(VarCurr,bitIndex38)<->v2383(VarCurr,bitIndex38)).
% 298.17/296.41  all VarCurr (v2383(VarCurr,bitIndex38)<->v2385(VarCurr,bitIndex38)).
% 298.17/296.41  all VarCurr (v2385(VarCurr,bitIndex38)<->v2387(VarCurr,bitIndex38)).
% 298.17/296.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3336(VarNext)-> (v2387(VarNext,bitIndex38)<->v2387(VarCurr,bitIndex38)))).
% 298.17/296.41  all VarNext (v3336(VarNext)-> (v2387(VarNext,bitIndex38)<->v3338(VarNext))).
% 298.17/296.41  all VarCurr (v3338(VarCurr)<->v2389(VarCurr)&v3339(VarCurr)).
% 298.17/296.41  all VarCurr (v3339(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex857))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex856))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex855))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex854))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex853))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex852))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex851))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex850))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex849))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex848))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex847))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex846))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex845))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex844))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex843))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex842))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex841))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex840))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex839))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex838))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex837))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex836))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex835))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex834))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex833))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex832))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex831))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex830))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex829))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex828))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex827))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex826))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex825))).
% 298.17/296.42  all VarCurr (v3336(VarCurr)<->v3337(VarCurr)&v2585(VarCurr)).
% 298.17/296.42  all VarCurr (-v3337(VarCurr)<->v2547(VarCurr)).
% 298.17/296.42  all VarNext (v803(VarNext,bitIndex38)<->v3327(VarNext,bitIndex38)).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3329(VarNext)-> (all B (range_63_0(B)-> (v3327(VarNext,B)<->v803(VarCurr,B)))))).
% 298.17/296.42  all VarNext (v3329(VarNext)-> (all B (range_63_0(B)-> (v3327(VarNext,B)<->v2377(VarNext,B))))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3329(VarNext)<->v3330(VarNext)&v2370(VarNext))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3330(VarNext)<->v3332(VarNext)&v2343(VarNext))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3332(VarNext)<->v2350(VarNext))).
% 298.17/296.42  all VarNext (v959(VarNext,bitIndex38)<->v3319(VarNext,bitIndex38)).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3320(VarNext)-> (all B (range_63_0(B)-> (v3319(VarNext,B)<->v959(VarCurr,B)))))).
% 298.17/296.42  all VarNext (v3320(VarNext)-> (all B (range_63_0(B)-> (v3319(VarNext,B)<->v2356(VarNext,B))))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3320(VarNext)<->v3321(VarNext))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3321(VarNext)<->v3323(VarNext)&v2343(VarNext))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3323(VarNext)<->v2350(VarNext))).
% 298.17/296.42  all VarCurr (v961(VarCurr,bitIndex38)<->v2339(VarCurr,bitIndex38)).
% 298.17/296.42  all VarCurr (-v3317(VarCurr)-> (v2327(VarCurr,bitIndex38)<->$F)).
% 298.17/296.42  all VarCurr (v3317(VarCurr)-> (v2327(VarCurr,bitIndex38)<->v2329(VarCurr))).
% 298.17/296.42  all VarCurr (v3317(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.17/296.42  all VarCurr (v963(VarCurr,bitIndex38)<->v2325(VarCurr,bitIndex38)).
% 298.17/296.42  all VarCurr (-v3315(VarCurr)-> (v965(VarCurr,bitIndex38)<->$F)).
% 298.17/296.42  all VarCurr (v3315(VarCurr)-> (v965(VarCurr,bitIndex38)<->v2201(VarCurr))).
% 298.17/296.42  all VarCurr (v3315(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.17/296.42  all VarCurr (v797(VarCurr,bitIndex39)<->v799(VarCurr,bitIndex39)).
% 298.17/296.42  all VarCurr (v799(VarCurr,bitIndex39)<->v2593(VarCurr,bitIndex39)).
% 298.17/296.42  all VarCurr (v801(VarCurr,bitIndex39)<->v2592(VarCurr,bitIndex39)).
% 298.17/296.42  all VarCurr (v2381(VarCurr,bitIndex39)<->v2383(VarCurr,bitIndex39)).
% 298.17/296.42  all VarCurr (v2383(VarCurr,bitIndex39)<->v2385(VarCurr,bitIndex39)).
% 298.17/296.42  all VarCurr (v2385(VarCurr,bitIndex39)<->v2387(VarCurr,bitIndex39)).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3306(VarNext)-> (v2387(VarNext,bitIndex39)<->v2387(VarCurr,bitIndex39)))).
% 298.17/296.42  all VarNext (v3306(VarNext)-> (v2387(VarNext,bitIndex39)<->v3308(VarNext))).
% 298.17/296.42  all VarCurr (v3308(VarCurr)<->v2389(VarCurr)&v3309(VarCurr)).
% 298.17/296.42  all VarCurr (v3309(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex824))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex823))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex822))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex821))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex820))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex819))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex818))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex817))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex816))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex815))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex814))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex813))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex812))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex811))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex810))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex809))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex808))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex807))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex806))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex805))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex804))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex803))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex802))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex801))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex800))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex799))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex798))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex797))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex796))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex795))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex794))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex793))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex792))).
% 298.17/296.42  all VarCurr (v3306(VarCurr)<->v3307(VarCurr)&v2585(VarCurr)).
% 298.17/296.42  all VarCurr (-v3307(VarCurr)<->v2547(VarCurr)).
% 298.17/296.42  all VarNext (v803(VarNext,bitIndex39)<->v3297(VarNext,bitIndex39)).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3299(VarNext)-> (all B (range_63_0(B)-> (v3297(VarNext,B)<->v803(VarCurr,B)))))).
% 298.17/296.42  all VarNext (v3299(VarNext)-> (all B (range_63_0(B)-> (v3297(VarNext,B)<->v2377(VarNext,B))))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3299(VarNext)<->v3300(VarNext)&v2370(VarNext))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3300(VarNext)<->v3302(VarNext)&v2343(VarNext))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3302(VarNext)<->v2350(VarNext))).
% 298.17/296.42  all VarNext (v959(VarNext,bitIndex39)<->v3289(VarNext,bitIndex39)).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3290(VarNext)-> (all B (range_63_0(B)-> (v3289(VarNext,B)<->v959(VarCurr,B)))))).
% 298.17/296.42  all VarNext (v3290(VarNext)-> (all B (range_63_0(B)-> (v3289(VarNext,B)<->v2356(VarNext,B))))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3290(VarNext)<->v3291(VarNext))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3291(VarNext)<->v3293(VarNext)&v2343(VarNext))).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3293(VarNext)<->v2350(VarNext))).
% 298.17/296.42  all VarCurr (v961(VarCurr,bitIndex39)<->v2339(VarCurr,bitIndex39)).
% 298.17/296.42  all VarCurr (-v3287(VarCurr)-> (v2327(VarCurr,bitIndex39)<->$F)).
% 298.17/296.42  all VarCurr (v3287(VarCurr)-> (v2327(VarCurr,bitIndex39)<->v2329(VarCurr))).
% 298.17/296.42  all VarCurr (v3287(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.17/296.42  all VarCurr (v963(VarCurr,bitIndex39)<->v2325(VarCurr,bitIndex39)).
% 298.17/296.42  all VarCurr (-v3285(VarCurr)-> (v965(VarCurr,bitIndex39)<->$F)).
% 298.17/296.42  all VarCurr (v3285(VarCurr)-> (v965(VarCurr,bitIndex39)<->v2201(VarCurr))).
% 298.17/296.42  all VarCurr (v3285(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.17/296.42  all VarCurr (v797(VarCurr,bitIndex40)<->v799(VarCurr,bitIndex40)).
% 298.17/296.42  all VarCurr (v799(VarCurr,bitIndex40)<->v2593(VarCurr,bitIndex40)).
% 298.17/296.42  all VarCurr (v801(VarCurr,bitIndex40)<->v2592(VarCurr,bitIndex40)).
% 298.17/296.42  all VarCurr (v2381(VarCurr,bitIndex40)<->v2383(VarCurr,bitIndex40)).
% 298.17/296.42  all VarCurr (v2383(VarCurr,bitIndex40)<->v2385(VarCurr,bitIndex40)).
% 298.17/296.42  all VarCurr (v2385(VarCurr,bitIndex40)<->v2387(VarCurr,bitIndex40)).
% 298.17/296.42  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3276(VarNext)-> (v2387(VarNext,bitIndex40)<->v2387(VarCurr,bitIndex40)))).
% 298.17/296.42  all VarNext (v3276(VarNext)-> (v2387(VarNext,bitIndex40)<->v3278(VarNext))).
% 298.17/296.42  all VarCurr (v3278(VarCurr)<->v2389(VarCurr)&v3279(VarCurr)).
% 298.17/296.42  all VarCurr (v3279(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex791))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex790))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex789))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex788))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex787))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex786))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex785))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex784))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex783))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex782))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex781))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex780))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex779))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex778))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex777))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex776))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex775))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex774))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex773))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex772))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex771))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex770))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex769))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex768))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex767))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex766))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex765))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex764))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex763))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex762))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex761))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex760))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex759))).
% 298.24/296.43  all VarCurr (v3276(VarCurr)<->v3277(VarCurr)&v2585(VarCurr)).
% 298.24/296.43  all VarCurr (-v3277(VarCurr)<->v2547(VarCurr)).
% 298.24/296.43  all VarNext (v803(VarNext,bitIndex40)<->v3267(VarNext,bitIndex40)).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3269(VarNext)-> (all B (range_63_0(B)-> (v3267(VarNext,B)<->v803(VarCurr,B)))))).
% 298.24/296.43  all VarNext (v3269(VarNext)-> (all B (range_63_0(B)-> (v3267(VarNext,B)<->v2377(VarNext,B))))).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3269(VarNext)<->v3270(VarNext)&v2370(VarNext))).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3270(VarNext)<->v3272(VarNext)&v2343(VarNext))).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3272(VarNext)<->v2350(VarNext))).
% 298.24/296.43  all VarNext (v959(VarNext,bitIndex40)<->v3259(VarNext,bitIndex40)).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3260(VarNext)-> (all B (range_63_0(B)-> (v3259(VarNext,B)<->v959(VarCurr,B)))))).
% 298.24/296.43  all VarNext (v3260(VarNext)-> (all B (range_63_0(B)-> (v3259(VarNext,B)<->v2356(VarNext,B))))).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3260(VarNext)<->v3261(VarNext))).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3261(VarNext)<->v3263(VarNext)&v2343(VarNext))).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3263(VarNext)<->v2350(VarNext))).
% 298.24/296.43  all VarCurr (v961(VarCurr,bitIndex40)<->v2339(VarCurr,bitIndex40)).
% 298.24/296.43  all VarCurr (-v3257(VarCurr)-> (v2327(VarCurr,bitIndex40)<->$F)).
% 298.24/296.43  all VarCurr (v3257(VarCurr)-> (v2327(VarCurr,bitIndex40)<->v2329(VarCurr))).
% 298.24/296.43  all VarCurr (v3257(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.24/296.43  all VarCurr (v963(VarCurr,bitIndex40)<->v2325(VarCurr,bitIndex40)).
% 298.24/296.43  all VarCurr (-v3255(VarCurr)-> (v965(VarCurr,bitIndex40)<->$F)).
% 298.24/296.43  all VarCurr (v3255(VarCurr)-> (v965(VarCurr,bitIndex40)<->v2201(VarCurr))).
% 298.24/296.43  all VarCurr (v3255(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.24/296.43  all VarCurr (v797(VarCurr,bitIndex41)<->v799(VarCurr,bitIndex41)).
% 298.24/296.43  all VarCurr (v799(VarCurr,bitIndex41)<->v2593(VarCurr,bitIndex41)).
% 298.24/296.43  all VarCurr (v801(VarCurr,bitIndex41)<->v2592(VarCurr,bitIndex41)).
% 298.24/296.43  all VarCurr (v2381(VarCurr,bitIndex41)<->v2383(VarCurr,bitIndex41)).
% 298.24/296.43  all VarCurr (v2383(VarCurr,bitIndex41)<->v2385(VarCurr,bitIndex41)).
% 298.24/296.43  all VarCurr (v2385(VarCurr,bitIndex41)<->v2387(VarCurr,bitIndex41)).
% 298.24/296.43  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3246(VarNext)-> (v2387(VarNext,bitIndex41)<->v2387(VarCurr,bitIndex41)))).
% 298.24/296.44  all VarNext (v3246(VarNext)-> (v2387(VarNext,bitIndex41)<->v3248(VarNext))).
% 298.24/296.44  all VarCurr (v3248(VarCurr)<->v2389(VarCurr)&v3249(VarCurr)).
% 298.24/296.44  all VarCurr (v3249(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex758))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex757))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex756))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex755))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex754))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex753))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex752))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex751))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex750))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex749))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex748))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex747))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex746))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex745))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex744))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex743))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex742))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex741))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex740))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex739))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex738))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex737))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex736))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex735))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex734))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex733))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex732))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex731))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex730))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex729))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex728))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex727))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex726))).
% 298.24/296.44  all VarCurr (v3246(VarCurr)<->v3247(VarCurr)&v2585(VarCurr)).
% 298.24/296.44  all VarCurr (-v3247(VarCurr)<->v2547(VarCurr)).
% 298.24/296.44  all VarNext (v803(VarNext,bitIndex41)<->v3237(VarNext,bitIndex41)).
% 298.24/296.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3239(VarNext)-> (all B (range_63_0(B)-> (v3237(VarNext,B)<->v803(VarCurr,B)))))).
% 298.24/296.44  all VarNext (v3239(VarNext)-> (all B (range_63_0(B)-> (v3237(VarNext,B)<->v2377(VarNext,B))))).
% 298.24/296.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3239(VarNext)<->v3240(VarNext)&v2370(VarNext))).
% 298.24/296.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3240(VarNext)<->v3242(VarNext)&v2343(VarNext))).
% 298.24/296.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3242(VarNext)<->v2350(VarNext))).
% 298.24/296.44  all VarNext (v959(VarNext,bitIndex41)<->v3229(VarNext,bitIndex41)).
% 298.24/296.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3230(VarNext)-> (all B (range_63_0(B)-> (v3229(VarNext,B)<->v959(VarCurr,B)))))).
% 298.24/296.44  all VarNext (v3230(VarNext)-> (all B (range_63_0(B)-> (v3229(VarNext,B)<->v2356(VarNext,B))))).
% 298.24/296.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3230(VarNext)<->v3231(VarNext))).
% 298.24/296.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3231(VarNext)<->v3233(VarNext)&v2343(VarNext))).
% 298.24/296.44  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3233(VarNext)<->v2350(VarNext))).
% 298.24/296.44  all VarCurr (v961(VarCurr,bitIndex41)<->v2339(VarCurr,bitIndex41)).
% 298.24/296.44  all VarCurr (-v3227(VarCurr)-> (v2327(VarCurr,bitIndex41)<->$F)).
% 298.24/296.44  all VarCurr (v3227(VarCurr)-> (v2327(VarCurr,bitIndex41)<->v2329(VarCurr))).
% 298.24/296.44  all VarCurr (v3227(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.24/296.44  all VarCurr (v963(VarCurr,bitIndex41)<->v2325(VarCurr,bitIndex41)).
% 298.24/296.44  all VarCurr (-v3225(VarCurr)-> (v965(VarCurr,bitIndex41)<->$F)).
% 298.24/296.44  all VarCurr (v3225(VarCurr)-> (v965(VarCurr,bitIndex41)<->v2201(VarCurr))).
% 298.24/296.44  all VarCurr (v3225(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.24/296.45  all VarCurr (v797(VarCurr,bitIndex42)<->v799(VarCurr,bitIndex42)).
% 298.24/296.45  all VarCurr (v799(VarCurr,bitIndex42)<->v2593(VarCurr,bitIndex42)).
% 298.24/296.45  all VarCurr (v801(VarCurr,bitIndex42)<->v2592(VarCurr,bitIndex42)).
% 298.24/296.45  all VarCurr (v2381(VarCurr,bitIndex42)<->v2383(VarCurr,bitIndex42)).
% 298.24/296.45  all VarCurr (v2383(VarCurr,bitIndex42)<->v2385(VarCurr,bitIndex42)).
% 298.24/296.45  all VarCurr (v2385(VarCurr,bitIndex42)<->v2387(VarCurr,bitIndex42)).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3216(VarNext)-> (v2387(VarNext,bitIndex42)<->v2387(VarCurr,bitIndex42)))).
% 298.24/296.45  all VarNext (v3216(VarNext)-> (v2387(VarNext,bitIndex42)<->v3218(VarNext))).
% 298.24/296.45  all VarCurr (v3218(VarCurr)<->v2389(VarCurr)&v3219(VarCurr)).
% 298.24/296.45  all VarCurr (v3219(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex725))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex724))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex723))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex722))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex721))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex720))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex719))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex718))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex717))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex716))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex715))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex714))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex713))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex712))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex711))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex710))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex709))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex708))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex707))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex706))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex705))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex704))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex703))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex702))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex701))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex700))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex699))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex698))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex697))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex696))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex695))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex694))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex693))).
% 298.24/296.45  all VarCurr (v3216(VarCurr)<->v3217(VarCurr)&v2585(VarCurr)).
% 298.24/296.45  all VarCurr (-v3217(VarCurr)<->v2547(VarCurr)).
% 298.24/296.45  all VarNext (v803(VarNext,bitIndex42)<->v3207(VarNext,bitIndex42)).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3209(VarNext)-> (all B (range_63_0(B)-> (v3207(VarNext,B)<->v803(VarCurr,B)))))).
% 298.24/296.45  all VarNext (v3209(VarNext)-> (all B (range_63_0(B)-> (v3207(VarNext,B)<->v2377(VarNext,B))))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3209(VarNext)<->v3210(VarNext)&v2370(VarNext))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3210(VarNext)<->v3212(VarNext)&v2343(VarNext))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3212(VarNext)<->v2350(VarNext))).
% 298.24/296.45  all VarNext (v959(VarNext,bitIndex42)<->v3199(VarNext,bitIndex42)).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3200(VarNext)-> (all B (range_63_0(B)-> (v3199(VarNext,B)<->v959(VarCurr,B)))))).
% 298.24/296.45  all VarNext (v3200(VarNext)-> (all B (range_63_0(B)-> (v3199(VarNext,B)<->v2356(VarNext,B))))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3200(VarNext)<->v3201(VarNext))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3201(VarNext)<->v3203(VarNext)&v2343(VarNext))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3203(VarNext)<->v2350(VarNext))).
% 298.24/296.45  all VarCurr (v961(VarCurr,bitIndex42)<->v2339(VarCurr,bitIndex42)).
% 298.24/296.45  all VarCurr (-v3197(VarCurr)-> (v2327(VarCurr,bitIndex42)<->$F)).
% 298.24/296.45  all VarCurr (v3197(VarCurr)-> (v2327(VarCurr,bitIndex42)<->v2329(VarCurr))).
% 298.24/296.45  all VarCurr (v3197(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.24/296.45  all VarCurr (v963(VarCurr,bitIndex42)<->v2325(VarCurr,bitIndex42)).
% 298.24/296.45  all VarCurr (-v3195(VarCurr)-> (v965(VarCurr,bitIndex42)<->$F)).
% 298.24/296.45  all VarCurr (v3195(VarCurr)-> (v965(VarCurr,bitIndex42)<->v2201(VarCurr))).
% 298.24/296.45  all VarCurr (v3195(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.24/296.45  all VarCurr (v797(VarCurr,bitIndex43)<->v799(VarCurr,bitIndex43)).
% 298.24/296.45  all VarCurr (v799(VarCurr,bitIndex43)<->v2593(VarCurr,bitIndex43)).
% 298.24/296.45  all VarCurr (v801(VarCurr,bitIndex43)<->v2592(VarCurr,bitIndex43)).
% 298.24/296.45  all VarCurr (v2381(VarCurr,bitIndex43)<->v2383(VarCurr,bitIndex43)).
% 298.24/296.45  all VarCurr (v2383(VarCurr,bitIndex43)<->v2385(VarCurr,bitIndex43)).
% 298.24/296.45  all VarCurr (v2385(VarCurr,bitIndex43)<->v2387(VarCurr,bitIndex43)).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3186(VarNext)-> (v2387(VarNext,bitIndex43)<->v2387(VarCurr,bitIndex43)))).
% 298.24/296.45  all VarNext (v3186(VarNext)-> (v2387(VarNext,bitIndex43)<->v3188(VarNext))).
% 298.24/296.45  all VarCurr (v3188(VarCurr)<->v2389(VarCurr)&v3189(VarCurr)).
% 298.24/296.45  all VarCurr (v3189(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex692))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex691))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex690))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex689))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex688))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex687))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex686))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex685))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex684))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex683))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex682))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex681))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex680))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex679))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex678))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex677))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex676))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex675))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex674))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex673))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex672))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex671))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex670))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex669))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex668))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex667))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex666))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex665))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex664))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex663))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex662))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex661))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex660))).
% 298.24/296.45  all VarCurr (v3186(VarCurr)<->v3187(VarCurr)&v2585(VarCurr)).
% 298.24/296.45  all VarCurr (-v3187(VarCurr)<->v2547(VarCurr)).
% 298.24/296.45  all VarNext (v803(VarNext,bitIndex43)<->v3177(VarNext,bitIndex43)).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3179(VarNext)-> (all B (range_63_0(B)-> (v3177(VarNext,B)<->v803(VarCurr,B)))))).
% 298.24/296.45  all VarNext (v3179(VarNext)-> (all B (range_63_0(B)-> (v3177(VarNext,B)<->v2377(VarNext,B))))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3179(VarNext)<->v3180(VarNext)&v2370(VarNext))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3180(VarNext)<->v3182(VarNext)&v2343(VarNext))).
% 298.24/296.45  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3182(VarNext)<->v2350(VarNext))).
% 298.24/296.46  all VarNext (v959(VarNext,bitIndex43)<->v3169(VarNext,bitIndex43)).
% 298.24/296.46  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3170(VarNext)-> (all B (range_63_0(B)-> (v3169(VarNext,B)<->v959(VarCurr,B)))))).
% 298.24/296.46  all VarNext (v3170(VarNext)-> (all B (range_63_0(B)-> (v3169(VarNext,B)<->v2356(VarNext,B))))).
% 298.24/296.46  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3170(VarNext)<->v3171(VarNext))).
% 298.24/296.46  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3171(VarNext)<->v3173(VarNext)&v2343(VarNext))).
% 298.24/296.46  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3173(VarNext)<->v2350(VarNext))).
% 298.24/296.46  all VarCurr (v961(VarCurr,bitIndex43)<->v2339(VarCurr,bitIndex43)).
% 298.24/296.46  all VarCurr (-v3167(VarCurr)-> (v2327(VarCurr,bitIndex43)<->$F)).
% 298.24/296.46  all VarCurr (v3167(VarCurr)-> (v2327(VarCurr,bitIndex43)<->v2329(VarCurr))).
% 298.24/296.46  all VarCurr (v3167(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.24/296.46  all VarCurr (v963(VarCurr,bitIndex43)<->v2325(VarCurr,bitIndex43)).
% 298.24/296.46  all VarCurr (-v3165(VarCurr)-> (v965(VarCurr,bitIndex43)<->$F)).
% 298.24/296.46  all VarCurr (v3165(VarCurr)-> (v965(VarCurr,bitIndex43)<->v2201(VarCurr))).
% 298.24/296.46  all VarCurr (v3165(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.24/296.46  all VarCurr (v797(VarCurr,bitIndex44)<->v799(VarCurr,bitIndex44)).
% 298.24/296.46  all VarCurr (v799(VarCurr,bitIndex44)<->v2593(VarCurr,bitIndex44)).
% 298.24/296.46  all VarCurr (v801(VarCurr,bitIndex44)<->v2592(VarCurr,bitIndex44)).
% 298.24/296.46  all VarCurr (v2381(VarCurr,bitIndex44)<->v2383(VarCurr,bitIndex44)).
% 298.24/296.46  all VarCurr (v2383(VarCurr,bitIndex44)<->v2385(VarCurr,bitIndex44)).
% 298.24/296.46  all VarCurr (v2385(VarCurr,bitIndex44)<->v2387(VarCurr,bitIndex44)).
% 298.24/296.46  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3156(VarNext)-> (v2387(VarNext,bitIndex44)<->v2387(VarCurr,bitIndex44)))).
% 298.24/296.46  all VarNext (v3156(VarNext)-> (v2387(VarNext,bitIndex44)<->v3158(VarNext))).
% 298.24/296.46  all VarCurr (v3158(VarCurr)<->v2389(VarCurr)&v3159(VarCurr)).
% 298.24/296.46  all VarCurr (v3159(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex659))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex658))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex657))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex656))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex655))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex654))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex653))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex652))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex651))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex650))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex649))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex648))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex647))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex646))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex645))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex644))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex643))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex642))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex641))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex640))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex639))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex638))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex637))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex636))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex635))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex634))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex633))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex632))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex631))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex630))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex629))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex628))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex627))).
% 298.27/296.47  all VarCurr (v3156(VarCurr)<->v3157(VarCurr)&v2585(VarCurr)).
% 298.27/296.47  all VarCurr (-v3157(VarCurr)<->v2547(VarCurr)).
% 298.27/296.47  all VarNext (v803(VarNext,bitIndex44)<->v3147(VarNext,bitIndex44)).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3149(VarNext)-> (all B (range_63_0(B)-> (v3147(VarNext,B)<->v803(VarCurr,B)))))).
% 298.27/296.47  all VarNext (v3149(VarNext)-> (all B (range_63_0(B)-> (v3147(VarNext,B)<->v2377(VarNext,B))))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3149(VarNext)<->v3150(VarNext)&v2370(VarNext))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3150(VarNext)<->v3152(VarNext)&v2343(VarNext))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3152(VarNext)<->v2350(VarNext))).
% 298.27/296.47  all VarNext (v959(VarNext,bitIndex44)<->v3139(VarNext,bitIndex44)).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3140(VarNext)-> (all B (range_63_0(B)-> (v3139(VarNext,B)<->v959(VarCurr,B)))))).
% 298.27/296.47  all VarNext (v3140(VarNext)-> (all B (range_63_0(B)-> (v3139(VarNext,B)<->v2356(VarNext,B))))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3140(VarNext)<->v3141(VarNext))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3141(VarNext)<->v3143(VarNext)&v2343(VarNext))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3143(VarNext)<->v2350(VarNext))).
% 298.27/296.47  all VarCurr (v961(VarCurr,bitIndex44)<->v2339(VarCurr,bitIndex44)).
% 298.27/296.47  all VarCurr (-v3137(VarCurr)-> (v2327(VarCurr,bitIndex44)<->$F)).
% 298.27/296.47  all VarCurr (v3137(VarCurr)-> (v2327(VarCurr,bitIndex44)<->v2329(VarCurr))).
% 298.27/296.47  all VarCurr (v3137(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.27/296.47  all VarCurr (v963(VarCurr,bitIndex44)<->v2325(VarCurr,bitIndex44)).
% 298.27/296.47  all VarCurr (-v3135(VarCurr)-> (v965(VarCurr,bitIndex44)<->$F)).
% 298.27/296.47  all VarCurr (v3135(VarCurr)-> (v965(VarCurr,bitIndex44)<->v2201(VarCurr))).
% 298.27/296.47  all VarCurr (v3135(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.27/296.47  all VarCurr (v797(VarCurr,bitIndex45)<->v799(VarCurr,bitIndex45)).
% 298.27/296.47  all VarCurr (v799(VarCurr,bitIndex45)<->v2593(VarCurr,bitIndex45)).
% 298.27/296.47  all VarCurr (v801(VarCurr,bitIndex45)<->v2592(VarCurr,bitIndex45)).
% 298.27/296.47  all VarCurr (v2381(VarCurr,bitIndex45)<->v2383(VarCurr,bitIndex45)).
% 298.27/296.47  all VarCurr (v2383(VarCurr,bitIndex45)<->v2385(VarCurr,bitIndex45)).
% 298.27/296.47  all VarCurr (v2385(VarCurr,bitIndex45)<->v2387(VarCurr,bitIndex45)).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3126(VarNext)-> (v2387(VarNext,bitIndex45)<->v2387(VarCurr,bitIndex45)))).
% 298.27/296.47  all VarNext (v3126(VarNext)-> (v2387(VarNext,bitIndex45)<->v3128(VarNext))).
% 298.27/296.47  all VarCurr (v3128(VarCurr)<->v2389(VarCurr)&v3129(VarCurr)).
% 298.27/296.47  all VarCurr (v3129(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex626))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex625))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex624))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex623))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex622))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex621))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex620))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex619))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex618))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex617))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex616))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex615))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex614))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex613))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex612))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex611))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex610))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex609))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex608))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex607))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex606))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex605))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex604))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex603))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex602))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex601))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex600))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex599))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex598))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex597))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex596))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex595))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex594))).
% 298.27/296.47  all VarCurr (v3126(VarCurr)<->v3127(VarCurr)&v2585(VarCurr)).
% 298.27/296.47  all VarCurr (-v3127(VarCurr)<->v2547(VarCurr)).
% 298.27/296.47  all VarNext (v803(VarNext,bitIndex45)<->v3117(VarNext,bitIndex45)).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3119(VarNext)-> (all B (range_63_0(B)-> (v3117(VarNext,B)<->v803(VarCurr,B)))))).
% 298.27/296.47  all VarNext (v3119(VarNext)-> (all B (range_63_0(B)-> (v3117(VarNext,B)<->v2377(VarNext,B))))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3119(VarNext)<->v3120(VarNext)&v2370(VarNext))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3120(VarNext)<->v3122(VarNext)&v2343(VarNext))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3122(VarNext)<->v2350(VarNext))).
% 298.27/296.47  all VarNext (v959(VarNext,bitIndex45)<->v3109(VarNext,bitIndex45)).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3110(VarNext)-> (all B (range_63_0(B)-> (v3109(VarNext,B)<->v959(VarCurr,B)))))).
% 298.27/296.47  all VarNext (v3110(VarNext)-> (all B (range_63_0(B)-> (v3109(VarNext,B)<->v2356(VarNext,B))))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3110(VarNext)<->v3111(VarNext))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3111(VarNext)<->v3113(VarNext)&v2343(VarNext))).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3113(VarNext)<->v2350(VarNext))).
% 298.27/296.47  all VarCurr (v961(VarCurr,bitIndex45)<->v2339(VarCurr,bitIndex45)).
% 298.27/296.47  all VarCurr (-v3107(VarCurr)-> (v2327(VarCurr,bitIndex45)<->$F)).
% 298.27/296.47  all VarCurr (v3107(VarCurr)-> (v2327(VarCurr,bitIndex45)<->v2329(VarCurr))).
% 298.27/296.47  all VarCurr (v3107(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.27/296.47  all VarCurr (v963(VarCurr,bitIndex45)<->v2325(VarCurr,bitIndex45)).
% 298.27/296.47  all VarCurr (-v3105(VarCurr)-> (v965(VarCurr,bitIndex45)<->$F)).
% 298.27/296.47  all VarCurr (v3105(VarCurr)-> (v965(VarCurr,bitIndex45)<->v2201(VarCurr))).
% 298.27/296.47  all VarCurr (v3105(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.27/296.47  all VarCurr (v797(VarCurr,bitIndex46)<->v799(VarCurr,bitIndex46)).
% 298.27/296.47  all VarCurr (v799(VarCurr,bitIndex46)<->v2593(VarCurr,bitIndex46)).
% 298.27/296.47  all VarCurr (v801(VarCurr,bitIndex46)<->v2592(VarCurr,bitIndex46)).
% 298.27/296.47  all VarCurr (v2381(VarCurr,bitIndex46)<->v2383(VarCurr,bitIndex46)).
% 298.27/296.47  all VarCurr (v2383(VarCurr,bitIndex46)<->v2385(VarCurr,bitIndex46)).
% 298.27/296.47  all VarCurr (v2385(VarCurr,bitIndex46)<->v2387(VarCurr,bitIndex46)).
% 298.27/296.47  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3096(VarNext)-> (v2387(VarNext,bitIndex46)<->v2387(VarCurr,bitIndex46)))).
% 298.27/296.47  all VarNext (v3096(VarNext)-> (v2387(VarNext,bitIndex46)<->v3098(VarNext))).
% 298.27/296.47  all VarCurr (v3098(VarCurr)<->v2389(VarCurr)&v3099(VarCurr)).
% 298.27/296.47  all VarCurr (v3099(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex593))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex592))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex591))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex590))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex589))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex588))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex587))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex586))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex585))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex584))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex583))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex582))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex581))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex580))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex579))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex578))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex577))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex576))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex575))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex574))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex573))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex572))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex571))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex570))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex569))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex568))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex567))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex566))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex565))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex564))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex563))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex562))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex561))).
% 298.27/296.48  all VarCurr (v3096(VarCurr)<->v3097(VarCurr)&v2585(VarCurr)).
% 298.27/296.48  all VarCurr (-v3097(VarCurr)<->v2547(VarCurr)).
% 298.27/296.48  all VarNext (v803(VarNext,bitIndex46)<->v3087(VarNext,bitIndex46)).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3089(VarNext)-> (all B (range_63_0(B)-> (v3087(VarNext,B)<->v803(VarCurr,B)))))).
% 298.27/296.48  all VarNext (v3089(VarNext)-> (all B (range_63_0(B)-> (v3087(VarNext,B)<->v2377(VarNext,B))))).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3089(VarNext)<->v3090(VarNext)&v2370(VarNext))).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3090(VarNext)<->v3092(VarNext)&v2343(VarNext))).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3092(VarNext)<->v2350(VarNext))).
% 298.27/296.48  all VarNext (v959(VarNext,bitIndex46)<->v3079(VarNext,bitIndex46)).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3080(VarNext)-> (all B (range_63_0(B)-> (v3079(VarNext,B)<->v959(VarCurr,B)))))).
% 298.27/296.48  all VarNext (v3080(VarNext)-> (all B (range_63_0(B)-> (v3079(VarNext,B)<->v2356(VarNext,B))))).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3080(VarNext)<->v3081(VarNext))).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3081(VarNext)<->v3083(VarNext)&v2343(VarNext))).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3083(VarNext)<->v2350(VarNext))).
% 298.27/296.48  all VarCurr (v961(VarCurr,bitIndex46)<->v2339(VarCurr,bitIndex46)).
% 298.27/296.48  all VarCurr (-v3077(VarCurr)-> (v2327(VarCurr,bitIndex46)<->$F)).
% 298.27/296.48  all VarCurr (v3077(VarCurr)-> (v2327(VarCurr,bitIndex46)<->v2329(VarCurr))).
% 298.27/296.48  all VarCurr (v3077(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.27/296.48  all VarCurr (v963(VarCurr,bitIndex46)<->v2325(VarCurr,bitIndex46)).
% 298.27/296.48  all VarCurr (-v3075(VarCurr)-> (v965(VarCurr,bitIndex46)<->$F)).
% 298.27/296.48  all VarCurr (v3075(VarCurr)-> (v965(VarCurr,bitIndex46)<->v2201(VarCurr))).
% 298.27/296.48  all VarCurr (v3075(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.27/296.48  all VarCurr (v797(VarCurr,bitIndex47)<->v799(VarCurr,bitIndex47)).
% 298.27/296.48  all VarCurr (v799(VarCurr,bitIndex47)<->v2593(VarCurr,bitIndex47)).
% 298.27/296.48  all VarCurr (v801(VarCurr,bitIndex47)<->v2592(VarCurr,bitIndex47)).
% 298.27/296.48  all VarCurr (v2381(VarCurr,bitIndex47)<->v2383(VarCurr,bitIndex47)).
% 298.27/296.48  all VarCurr (v2383(VarCurr,bitIndex47)<->v2385(VarCurr,bitIndex47)).
% 298.27/296.48  all VarCurr (v2385(VarCurr,bitIndex47)<->v2387(VarCurr,bitIndex47)).
% 298.27/296.48  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3066(VarNext)-> (v2387(VarNext,bitIndex47)<->v2387(VarCurr,bitIndex47)))).
% 298.27/296.48  all VarNext (v3066(VarNext)-> (v2387(VarNext,bitIndex47)<->v3068(VarNext))).
% 298.27/296.48  all VarCurr (v3068(VarCurr)<->v2389(VarCurr)&v3069(VarCurr)).
% 298.27/296.49  all VarCurr (v3069(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex560))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex559))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex558))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex557))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex556))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex555))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex554))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex553))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex552))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex551))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex550))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex549))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex548))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex547))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex546))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex545))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex544))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex543))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex542))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex541))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex540))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex539))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex538))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex537))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex536))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex535))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex534))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex533))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex532))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex531))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex530))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex529))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex528))).
% 298.27/296.49  all VarCurr (v3066(VarCurr)<->v3067(VarCurr)&v2585(VarCurr)).
% 298.27/296.49  all VarCurr (-v3067(VarCurr)<->v2547(VarCurr)).
% 298.27/296.49  all VarNext (v803(VarNext,bitIndex47)<->v3057(VarNext,bitIndex47)).
% 298.27/296.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3059(VarNext)-> (all B (range_63_0(B)-> (v3057(VarNext,B)<->v803(VarCurr,B)))))).
% 298.27/296.49  all VarNext (v3059(VarNext)-> (all B (range_63_0(B)-> (v3057(VarNext,B)<->v2377(VarNext,B))))).
% 298.27/296.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3059(VarNext)<->v3060(VarNext)&v2370(VarNext))).
% 298.27/296.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3060(VarNext)<->v3062(VarNext)&v2343(VarNext))).
% 298.27/296.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3062(VarNext)<->v2350(VarNext))).
% 298.27/296.49  all VarNext (v959(VarNext,bitIndex47)<->v3049(VarNext,bitIndex47)).
% 298.27/296.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3050(VarNext)-> (all B (range_63_0(B)-> (v3049(VarNext,B)<->v959(VarCurr,B)))))).
% 298.27/296.49  all VarNext (v3050(VarNext)-> (all B (range_63_0(B)-> (v3049(VarNext,B)<->v2356(VarNext,B))))).
% 298.27/296.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3050(VarNext)<->v3051(VarNext))).
% 298.27/296.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3051(VarNext)<->v3053(VarNext)&v2343(VarNext))).
% 298.27/296.49  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3053(VarNext)<->v2350(VarNext))).
% 298.27/296.49  all VarCurr (v961(VarCurr,bitIndex47)<->v2339(VarCurr,bitIndex47)).
% 298.27/296.49  all VarCurr (-v3047(VarCurr)-> (v2327(VarCurr,bitIndex47)<->$F)).
% 298.27/296.49  all VarCurr (v3047(VarCurr)-> (v2327(VarCurr,bitIndex47)<->v2329(VarCurr))).
% 298.27/296.49  all VarCurr (v3047(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.27/296.49  all VarCurr (v963(VarCurr,bitIndex47)<->v2325(VarCurr,bitIndex47)).
% 298.27/296.49  all VarCurr (-v3045(VarCurr)-> (v965(VarCurr,bitIndex47)<->$F)).
% 298.27/296.49  all VarCurr (v3045(VarCurr)-> (v965(VarCurr,bitIndex47)<->v2201(VarCurr))).
% 298.27/296.49  all VarCurr (v3045(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$F)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.27/296.50  all VarCurr (v797(VarCurr,bitIndex48)<->v799(VarCurr,bitIndex48)).
% 298.27/296.50  all VarCurr (v799(VarCurr,bitIndex48)<->v2593(VarCurr,bitIndex48)).
% 298.27/296.50  all VarCurr (v801(VarCurr,bitIndex48)<->v2592(VarCurr,bitIndex48)).
% 298.27/296.50  all VarCurr (v2381(VarCurr,bitIndex48)<->v2383(VarCurr,bitIndex48)).
% 298.27/296.50  all VarCurr (v2383(VarCurr,bitIndex48)<->v2385(VarCurr,bitIndex48)).
% 298.27/296.50  all VarCurr (v2385(VarCurr,bitIndex48)<->v2387(VarCurr,bitIndex48)).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3036(VarNext)-> (v2387(VarNext,bitIndex48)<->v2387(VarCurr,bitIndex48)))).
% 298.27/296.50  all VarNext (v3036(VarNext)-> (v2387(VarNext,bitIndex48)<->v3038(VarNext))).
% 298.27/296.50  all VarCurr (v3038(VarCurr)<->v2389(VarCurr)&v3039(VarCurr)).
% 298.27/296.50  all VarCurr (v3039(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex527))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex526))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex525))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex524))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex523))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex522))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex521))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex520))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex519))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex518))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex517))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex516))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex515))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex514))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex513))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex512))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex511))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex510))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex509))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex508))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex507))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex506))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex505))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex504))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex503))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex502))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex501))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex500))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex499))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex498))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex497))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex496))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex495))).
% 298.27/296.50  all VarCurr (v3036(VarCurr)<->v3037(VarCurr)&v2585(VarCurr)).
% 298.27/296.50  all VarCurr (-v3037(VarCurr)<->v2547(VarCurr)).
% 298.27/296.50  all VarNext (v803(VarNext,bitIndex48)<->v3027(VarNext,bitIndex48)).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3029(VarNext)-> (all B (range_63_0(B)-> (v3027(VarNext,B)<->v803(VarCurr,B)))))).
% 298.27/296.50  all VarNext (v3029(VarNext)-> (all B (range_63_0(B)-> (v3027(VarNext,B)<->v2377(VarNext,B))))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3029(VarNext)<->v3030(VarNext)&v2370(VarNext))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3030(VarNext)<->v3032(VarNext)&v2343(VarNext))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3032(VarNext)<->v2350(VarNext))).
% 298.27/296.50  all VarNext (v959(VarNext,bitIndex48)<->v3019(VarNext,bitIndex48)).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3020(VarNext)-> (all B (range_63_0(B)-> (v3019(VarNext,B)<->v959(VarCurr,B)))))).
% 298.27/296.50  all VarNext (v3020(VarNext)-> (all B (range_63_0(B)-> (v3019(VarNext,B)<->v2356(VarNext,B))))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3020(VarNext)<->v3021(VarNext))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3021(VarNext)<->v3023(VarNext)&v2343(VarNext))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3023(VarNext)<->v2350(VarNext))).
% 298.27/296.50  all VarCurr (v961(VarCurr,bitIndex48)<->v2339(VarCurr,bitIndex48)).
% 298.27/296.50  all VarCurr (-v3017(VarCurr)-> (v2327(VarCurr,bitIndex48)<->$F)).
% 298.27/296.50  all VarCurr (v3017(VarCurr)-> (v2327(VarCurr,bitIndex48)<->v2329(VarCurr))).
% 298.27/296.50  all VarCurr (v3017(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.27/296.50  all VarCurr (v963(VarCurr,bitIndex48)<->v2325(VarCurr,bitIndex48)).
% 298.27/296.50  all VarCurr (-v3015(VarCurr)-> (v965(VarCurr,bitIndex48)<->$F)).
% 298.27/296.50  all VarCurr (v3015(VarCurr)-> (v965(VarCurr,bitIndex48)<->v2201(VarCurr))).
% 298.27/296.50  all VarCurr (v3015(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.27/296.50  all VarCurr (v797(VarCurr,bitIndex49)<->v799(VarCurr,bitIndex49)).
% 298.27/296.50  all VarCurr (v799(VarCurr,bitIndex49)<->v2593(VarCurr,bitIndex49)).
% 298.27/296.50  all VarCurr (v801(VarCurr,bitIndex49)<->v2592(VarCurr,bitIndex49)).
% 298.27/296.50  all VarCurr (v2381(VarCurr,bitIndex49)<->v2383(VarCurr,bitIndex49)).
% 298.27/296.50  all VarCurr (v2383(VarCurr,bitIndex49)<->v2385(VarCurr,bitIndex49)).
% 298.27/296.50  all VarCurr (v2385(VarCurr,bitIndex49)<->v2387(VarCurr,bitIndex49)).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3006(VarNext)-> (v2387(VarNext,bitIndex49)<->v2387(VarCurr,bitIndex49)))).
% 298.27/296.50  all VarNext (v3006(VarNext)-> (v2387(VarNext,bitIndex49)<->v3008(VarNext))).
% 298.27/296.50  all VarCurr (v3008(VarCurr)<->v2389(VarCurr)&v3009(VarCurr)).
% 298.27/296.50  all VarCurr (v3009(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex494))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex493))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex492))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex491))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex490))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex489))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex488))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex487))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex486))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex485))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex484))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex483))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex482))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex481))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex480))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex479))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex478))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex477))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex476))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex475))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex474))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex473))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex472))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex471))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex470))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex469))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex468))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex467))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex466))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex465))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex464))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex463))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex462))).
% 298.27/296.50  all VarCurr (v3006(VarCurr)<->v3007(VarCurr)&v2585(VarCurr)).
% 298.27/296.50  all VarCurr (-v3007(VarCurr)<->v2547(VarCurr)).
% 298.27/296.50  all VarNext (v803(VarNext,bitIndex49)<->v2997(VarNext,bitIndex49)).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2999(VarNext)-> (all B (range_63_0(B)-> (v2997(VarNext,B)<->v803(VarCurr,B)))))).
% 298.27/296.50  all VarNext (v2999(VarNext)-> (all B (range_63_0(B)-> (v2997(VarNext,B)<->v2377(VarNext,B))))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2999(VarNext)<->v3000(VarNext)&v2370(VarNext))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v3000(VarNext)<->v3002(VarNext)&v2343(VarNext))).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v3002(VarNext)<->v2350(VarNext))).
% 298.27/296.50  all VarNext (v959(VarNext,bitIndex49)<->v2989(VarNext,bitIndex49)).
% 298.27/296.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2990(VarNext)-> (all B (range_63_0(B)-> (v2989(VarNext,B)<->v959(VarCurr,B)))))).
% 298.27/296.51  all VarNext (v2990(VarNext)-> (all B (range_63_0(B)-> (v2989(VarNext,B)<->v2356(VarNext,B))))).
% 298.27/296.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2990(VarNext)<->v2991(VarNext))).
% 298.27/296.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2991(VarNext)<->v2993(VarNext)&v2343(VarNext))).
% 298.27/296.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2993(VarNext)<->v2350(VarNext))).
% 298.27/296.51  all VarCurr (v961(VarCurr,bitIndex49)<->v2339(VarCurr,bitIndex49)).
% 298.27/296.51  all VarCurr (-v2987(VarCurr)-> (v2327(VarCurr,bitIndex49)<->$F)).
% 298.27/296.51  all VarCurr (v2987(VarCurr)-> (v2327(VarCurr,bitIndex49)<->v2329(VarCurr))).
% 298.27/296.51  all VarCurr (v2987(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.27/296.51  all VarCurr (v963(VarCurr,bitIndex49)<->v2325(VarCurr,bitIndex49)).
% 298.27/296.51  all VarCurr (-v2985(VarCurr)-> (v965(VarCurr,bitIndex49)<->$F)).
% 298.27/296.51  all VarCurr (v2985(VarCurr)-> (v965(VarCurr,bitIndex49)<->v2201(VarCurr))).
% 298.27/296.51  all VarCurr (v2985(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.27/296.51  all VarCurr (v797(VarCurr,bitIndex50)<->v799(VarCurr,bitIndex50)).
% 298.27/296.51  all VarCurr (v799(VarCurr,bitIndex50)<->v2593(VarCurr,bitIndex50)).
% 298.27/296.51  all VarCurr (v801(VarCurr,bitIndex50)<->v2592(VarCurr,bitIndex50)).
% 298.27/296.51  all VarCurr (v2381(VarCurr,bitIndex50)<->v2383(VarCurr,bitIndex50)).
% 298.27/296.51  all VarCurr (v2383(VarCurr,bitIndex50)<->v2385(VarCurr,bitIndex50)).
% 298.27/296.51  all VarCurr (v2385(VarCurr,bitIndex50)<->v2387(VarCurr,bitIndex50)).
% 298.27/296.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2976(VarNext)-> (v2387(VarNext,bitIndex50)<->v2387(VarCurr,bitIndex50)))).
% 298.27/296.51  all VarNext (v2976(VarNext)-> (v2387(VarNext,bitIndex50)<->v2978(VarNext))).
% 298.27/296.51  all VarCurr (v2978(VarCurr)<->v2389(VarCurr)&v2979(VarCurr)).
% 298.27/296.51  all VarCurr (v2979(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex461))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex460))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex459))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex458))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex457))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex456))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex455))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex454))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex453))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex452))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex451))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex450))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex449))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex448))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex447))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex446))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex445))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex444))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex443))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex442))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex441))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex440))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex439))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex438))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex437))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex436))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex435))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex434))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex433))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex432))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex431))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex430))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex429))).
% 298.27/296.51  all VarCurr (v2976(VarCurr)<->v2977(VarCurr)&v2585(VarCurr)).
% 298.27/296.51  all VarCurr (-v2977(VarCurr)<->v2547(VarCurr)).
% 298.27/296.51  all VarNext (v803(VarNext,bitIndex50)<->v2967(VarNext,bitIndex50)).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2969(VarNext)-> (all B (range_63_0(B)-> (v2967(VarNext,B)<->v803(VarCurr,B)))))).
% 298.27/296.52  all VarNext (v2969(VarNext)-> (all B (range_63_0(B)-> (v2967(VarNext,B)<->v2377(VarNext,B))))).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2969(VarNext)<->v2970(VarNext)&v2370(VarNext))).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2970(VarNext)<->v2972(VarNext)&v2343(VarNext))).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2972(VarNext)<->v2350(VarNext))).
% 298.27/296.52  all VarNext (v959(VarNext,bitIndex50)<->v2959(VarNext,bitIndex50)).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2960(VarNext)-> (all B (range_63_0(B)-> (v2959(VarNext,B)<->v959(VarCurr,B)))))).
% 298.27/296.52  all VarNext (v2960(VarNext)-> (all B (range_63_0(B)-> (v2959(VarNext,B)<->v2356(VarNext,B))))).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2960(VarNext)<->v2961(VarNext))).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2961(VarNext)<->v2963(VarNext)&v2343(VarNext))).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2963(VarNext)<->v2350(VarNext))).
% 298.27/296.52  all VarCurr (v961(VarCurr,bitIndex50)<->v2339(VarCurr,bitIndex50)).
% 298.27/296.52  all VarCurr (-v2957(VarCurr)-> (v2327(VarCurr,bitIndex50)<->$F)).
% 298.27/296.52  all VarCurr (v2957(VarCurr)-> (v2327(VarCurr,bitIndex50)<->v2329(VarCurr))).
% 298.27/296.52  all VarCurr (v2957(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.27/296.52  all VarCurr (v963(VarCurr,bitIndex50)<->v2325(VarCurr,bitIndex50)).
% 298.27/296.52  all VarCurr (-v2955(VarCurr)-> (v965(VarCurr,bitIndex50)<->$F)).
% 298.27/296.52  all VarCurr (v2955(VarCurr)-> (v965(VarCurr,bitIndex50)<->v2201(VarCurr))).
% 298.27/296.52  all VarCurr (v2955(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.27/296.52  all VarCurr (v797(VarCurr,bitIndex51)<->v799(VarCurr,bitIndex51)).
% 298.27/296.52  all VarCurr (v799(VarCurr,bitIndex51)<->v2593(VarCurr,bitIndex51)).
% 298.27/296.52  all VarCurr (v801(VarCurr,bitIndex51)<->v2592(VarCurr,bitIndex51)).
% 298.27/296.52  all VarCurr (v2381(VarCurr,bitIndex51)<->v2383(VarCurr,bitIndex51)).
% 298.27/296.52  all VarCurr (v2383(VarCurr,bitIndex51)<->v2385(VarCurr,bitIndex51)).
% 298.27/296.52  all VarCurr (v2385(VarCurr,bitIndex51)<->v2387(VarCurr,bitIndex51)).
% 298.27/296.52  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2946(VarNext)-> (v2387(VarNext,bitIndex51)<->v2387(VarCurr,bitIndex51)))).
% 298.27/296.52  all VarNext (v2946(VarNext)-> (v2387(VarNext,bitIndex51)<->v2948(VarNext))).
% 298.27/296.52  all VarCurr (v2948(VarCurr)<->v2389(VarCurr)&v2949(VarCurr)).
% 298.27/296.52  all VarCurr (v2949(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex428))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex427))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex426))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex425))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex424))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex423))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex422))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex421))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex420))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex419))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex418))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex417))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex416))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex415))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex414))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex413))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex412))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex411))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex410))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex409))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex408))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex407))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex406))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex405))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex404))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex403))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex402))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex401))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex400))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex399))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex398))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex397))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex396))).
% 298.27/296.53  all VarCurr (v2946(VarCurr)<->v2947(VarCurr)&v2585(VarCurr)).
% 298.27/296.53  all VarCurr (-v2947(VarCurr)<->v2547(VarCurr)).
% 298.27/296.53  all VarNext (v803(VarNext,bitIndex51)<->v2937(VarNext,bitIndex51)).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2939(VarNext)-> (all B (range_63_0(B)-> (v2937(VarNext,B)<->v803(VarCurr,B)))))).
% 298.27/296.53  all VarNext (v2939(VarNext)-> (all B (range_63_0(B)-> (v2937(VarNext,B)<->v2377(VarNext,B))))).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2939(VarNext)<->v2940(VarNext)&v2370(VarNext))).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2940(VarNext)<->v2942(VarNext)&v2343(VarNext))).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2942(VarNext)<->v2350(VarNext))).
% 298.27/296.53  all VarNext (v959(VarNext,bitIndex51)<->v2929(VarNext,bitIndex51)).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2930(VarNext)-> (all B (range_63_0(B)-> (v2929(VarNext,B)<->v959(VarCurr,B)))))).
% 298.27/296.53  all VarNext (v2930(VarNext)-> (all B (range_63_0(B)-> (v2929(VarNext,B)<->v2356(VarNext,B))))).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2930(VarNext)<->v2931(VarNext))).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2931(VarNext)<->v2933(VarNext)&v2343(VarNext))).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2933(VarNext)<->v2350(VarNext))).
% 298.27/296.53  all VarCurr (v961(VarCurr,bitIndex51)<->v2339(VarCurr,bitIndex51)).
% 298.27/296.53  all VarCurr (-v2927(VarCurr)-> (v2327(VarCurr,bitIndex51)<->$F)).
% 298.27/296.53  all VarCurr (v2927(VarCurr)-> (v2327(VarCurr,bitIndex51)<->v2329(VarCurr))).
% 298.27/296.53  all VarCurr (v2927(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.27/296.53  all VarCurr (v963(VarCurr,bitIndex51)<->v2325(VarCurr,bitIndex51)).
% 298.27/296.53  all VarCurr (-v2925(VarCurr)-> (v965(VarCurr,bitIndex51)<->$F)).
% 298.27/296.53  all VarCurr (v2925(VarCurr)-> (v965(VarCurr,bitIndex51)<->v2201(VarCurr))).
% 298.27/296.53  all VarCurr (v2925(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.27/296.53  all VarCurr (v797(VarCurr,bitIndex52)<->v799(VarCurr,bitIndex52)).
% 298.27/296.53  all VarCurr (v799(VarCurr,bitIndex52)<->v2593(VarCurr,bitIndex52)).
% 298.27/296.53  all VarCurr (v801(VarCurr,bitIndex52)<->v2592(VarCurr,bitIndex52)).
% 298.27/296.53  all VarCurr (v2381(VarCurr,bitIndex52)<->v2383(VarCurr,bitIndex52)).
% 298.27/296.53  all VarCurr (v2383(VarCurr,bitIndex52)<->v2385(VarCurr,bitIndex52)).
% 298.27/296.53  all VarCurr (v2385(VarCurr,bitIndex52)<->v2387(VarCurr,bitIndex52)).
% 298.27/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2916(VarNext)-> (v2387(VarNext,bitIndex52)<->v2387(VarCurr,bitIndex52)))).
% 298.27/296.53  all VarNext (v2916(VarNext)-> (v2387(VarNext,bitIndex52)<->v2918(VarNext))).
% 298.27/296.53  all VarCurr (v2918(VarCurr)<->v2389(VarCurr)&v2919(VarCurr)).
% 298.27/296.53  all VarCurr (v2919(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex395))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex394))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex393))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex392))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex391))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex390))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex389))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex388))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex387))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex386))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex385))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex384))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex383))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex382))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex381))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex380))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex379))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex378))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex377))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex376))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex375))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex374))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex373))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex372))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex371))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex370))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex369))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex368))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex367))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex366))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex365))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex364))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex363))).
% 298.34/296.53  all VarCurr (v2916(VarCurr)<->v2917(VarCurr)&v2585(VarCurr)).
% 298.34/296.53  all VarCurr (-v2917(VarCurr)<->v2547(VarCurr)).
% 298.34/296.53  all VarNext (v803(VarNext,bitIndex52)<->v2907(VarNext,bitIndex52)).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2909(VarNext)-> (all B (range_63_0(B)-> (v2907(VarNext,B)<->v803(VarCurr,B)))))).
% 298.34/296.53  all VarNext (v2909(VarNext)-> (all B (range_63_0(B)-> (v2907(VarNext,B)<->v2377(VarNext,B))))).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2909(VarNext)<->v2910(VarNext)&v2370(VarNext))).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2910(VarNext)<->v2912(VarNext)&v2343(VarNext))).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2912(VarNext)<->v2350(VarNext))).
% 298.34/296.53  all VarNext (v959(VarNext,bitIndex52)<->v2899(VarNext,bitIndex52)).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2900(VarNext)-> (all B (range_63_0(B)-> (v2899(VarNext,B)<->v959(VarCurr,B)))))).
% 298.34/296.53  all VarNext (v2900(VarNext)-> (all B (range_63_0(B)-> (v2899(VarNext,B)<->v2356(VarNext,B))))).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2900(VarNext)<->v2901(VarNext))).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2901(VarNext)<->v2903(VarNext)&v2343(VarNext))).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2903(VarNext)<->v2350(VarNext))).
% 298.34/296.53  all VarCurr (v961(VarCurr,bitIndex52)<->v2339(VarCurr,bitIndex52)).
% 298.34/296.53  all VarCurr (-v2897(VarCurr)-> (v2327(VarCurr,bitIndex52)<->$F)).
% 298.34/296.53  all VarCurr (v2897(VarCurr)-> (v2327(VarCurr,bitIndex52)<->v2329(VarCurr))).
% 298.34/296.53  all VarCurr (v2897(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.34/296.53  all VarCurr (v963(VarCurr,bitIndex52)<->v2325(VarCurr,bitIndex52)).
% 298.34/296.53  all VarCurr (-v2895(VarCurr)-> (v965(VarCurr,bitIndex52)<->$F)).
% 298.34/296.53  all VarCurr (v2895(VarCurr)-> (v965(VarCurr,bitIndex52)<->v2201(VarCurr))).
% 298.34/296.53  all VarCurr (v2895(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.34/296.53  all VarCurr (v797(VarCurr,bitIndex53)<->v799(VarCurr,bitIndex53)).
% 298.34/296.53  all VarCurr (v799(VarCurr,bitIndex53)<->v2593(VarCurr,bitIndex53)).
% 298.34/296.53  all VarCurr (v801(VarCurr,bitIndex53)<->v2592(VarCurr,bitIndex53)).
% 298.34/296.53  all VarCurr (v2381(VarCurr,bitIndex53)<->v2383(VarCurr,bitIndex53)).
% 298.34/296.53  all VarCurr (v2383(VarCurr,bitIndex53)<->v2385(VarCurr,bitIndex53)).
% 298.34/296.53  all VarCurr (v2385(VarCurr,bitIndex53)<->v2387(VarCurr,bitIndex53)).
% 298.34/296.53  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2886(VarNext)-> (v2387(VarNext,bitIndex53)<->v2387(VarCurr,bitIndex53)))).
% 298.34/296.53  all VarNext (v2886(VarNext)-> (v2387(VarNext,bitIndex53)<->v2888(VarNext))).
% 298.34/296.53  all VarCurr (v2888(VarCurr)<->v2389(VarCurr)&v2889(VarCurr)).
% 298.34/296.53  all VarCurr (v2889(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex362))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex361))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex360))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex359))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex358))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex357))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex356))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex355))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex354))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex353))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex352))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex351))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex350))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex349))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex348))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex347))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex346))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex345))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex344))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex343))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex342))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex341))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex340))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex339))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex338))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex337))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex336))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex335))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex334))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex333))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex332))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex331))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex330))).
% 298.34/296.54  all VarCurr (v2886(VarCurr)<->v2887(VarCurr)&v2585(VarCurr)).
% 298.34/296.54  all VarCurr (-v2887(VarCurr)<->v2547(VarCurr)).
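The load condition for bit 53 of v2387 (v2888 and v2889 above) compares the 33-bit word v2409[32..0] against the slice v2545[362..330]. The analogous conditions below for bits 54 through 63 use the windows 329..297, 296..264, and so on down to 32..0, so the window for bit b starts at offset 33*(63 - b). A minimal sketch of that match under the same $T/$F-to-boolean reading (names illustrative):

```python
# Sketch of the load-condition comparisons v2889 (bit 53), v2859 (54),
# ..., v2587 (63): v2409[32..0] is matched against a 33-bit window of
# v2545 whose base offset is 33*(63 - b). Names illustrative.

def window_match(v2409, v2545, b):
    """v2409: 33 booleans (indices 0..32); v2545: a long boolean list."""
    base = 33 * (63 - b)
    return all(v2409[k] == v2545[base + k] for k in range(33))

# For b = 53 this checks v2545[330..362], matching v2889 above.
```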
% 298.34/296.54  all VarNext (v803(VarNext,bitIndex53)<->v2877(VarNext,bitIndex53)).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2879(VarNext)-> (all B (range_63_0(B)-> (v2877(VarNext,B)<->v803(VarCurr,B)))))).
% 298.34/296.54  all VarNext (v2879(VarNext)-> (all B (range_63_0(B)-> (v2877(VarNext,B)<->v2377(VarNext,B))))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2879(VarNext)<->v2880(VarNext)&v2370(VarNext))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2880(VarNext)<->v2882(VarNext)&v2343(VarNext))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2882(VarNext)<->v2350(VarNext))).
% 298.34/296.54  all VarNext (v959(VarNext,bitIndex53)<->v2869(VarNext,bitIndex53)).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2870(VarNext)-> (all B (range_63_0(B)-> (v2869(VarNext,B)<->v959(VarCurr,B)))))).
% 298.34/296.54  all VarNext (v2870(VarNext)-> (all B (range_63_0(B)-> (v2869(VarNext,B)<->v2356(VarNext,B))))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2870(VarNext)<->v2871(VarNext))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2871(VarNext)<->v2873(VarNext)&v2343(VarNext))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2873(VarNext)<->v2350(VarNext))).
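The state-holding formulas for v803 and v959 at bit 53 (and at every other index in this section) follow one hold/load idiom: across a nextState step the 64-bit range_63_0 word keeps its previous value unless an enable such as v2879 or v2870 holds, in which case the whole word is latched from v2377 or v2356. A minimal sketch of one such step, with illustrative names standing in for the numbered signals:

```python
# Sketch of the hold/load register idiom repeated for every bit index:
# across a nextState step the 64-bit word keeps its old value unless the
# enable fires, in which case a fresh word is latched. Names illustrative.

def step_register(prev_word, data_word, enable):
    """prev_word, data_word: 64 booleans each (the range_63_0 word)."""
    return list(data_word) if enable else list(prev_word)

def enable_v2879(v2350, v2343, v2370):
    # v2882 = not v2350; v2880 = v2882 and v2343; v2879 = v2880 and v2370
    return (not v2350) and v2343 and v2370

def enable_v2870(v2350, v2343):
    # v2873 = not v2350; v2871 = v2873 and v2343; v2870 = v2871
    return (not v2350) and v2343
```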
% 298.34/296.54  all VarCurr (v961(VarCurr,bitIndex53)<->v2339(VarCurr,bitIndex53)).
% 298.34/296.54  all VarCurr (-v2867(VarCurr)-> (v2327(VarCurr,bitIndex53)<->$F)).
% 298.34/296.54  all VarCurr (v2867(VarCurr)-> (v2327(VarCurr,bitIndex53)<->v2329(VarCurr))).
% 298.34/296.54  all VarCurr (v2867(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.34/296.54  all VarCurr (v963(VarCurr,bitIndex53)<->v2325(VarCurr,bitIndex53)).
% 298.34/296.54  all VarCurr (-v2865(VarCurr)-> (v965(VarCurr,bitIndex53)<->$F)).
% 298.34/296.54  all VarCurr (v2865(VarCurr)-> (v965(VarCurr,bitIndex53)<->v2201(VarCurr))).
% 298.34/296.54  all VarCurr (v2865(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.34/296.54  all VarCurr (v797(VarCurr,bitIndex54)<->v799(VarCurr,bitIndex54)).
% 298.34/296.54  all VarCurr (v799(VarCurr,bitIndex54)<->v2593(VarCurr,bitIndex54)).
% 298.34/296.54  all VarCurr (v801(VarCurr,bitIndex54)<->v2592(VarCurr,bitIndex54)).
% 298.34/296.54  all VarCurr (v2381(VarCurr,bitIndex54)<->v2383(VarCurr,bitIndex54)).
% 298.34/296.54  all VarCurr (v2383(VarCurr,bitIndex54)<->v2385(VarCurr,bitIndex54)).
% 298.34/296.54  all VarCurr (v2385(VarCurr,bitIndex54)<->v2387(VarCurr,bitIndex54)).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2856(VarNext)-> (v2387(VarNext,bitIndex54)<->v2387(VarCurr,bitIndex54)))).
% 298.34/296.54  all VarNext (v2856(VarNext)-> (v2387(VarNext,bitIndex54)<->v2858(VarNext))).
% 298.34/296.54  all VarCurr (v2858(VarCurr)<->v2389(VarCurr)&v2859(VarCurr)).
% 298.34/296.54  all VarCurr (v2859(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex329))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex328))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex327))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex326))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex325))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex324))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex323))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex322))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex321))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex320))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex319))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex318))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex317))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex316))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex315))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex314))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex313))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex312))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex311))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex310))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex309))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex308))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex307))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex306))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex305))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex304))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex303))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex302))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex301))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex300))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex299))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex298))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex297))).
% 298.34/296.54  all VarCurr (v2856(VarCurr)<->v2857(VarCurr)&v2585(VarCurr)).
% 298.34/296.54  all VarCurr (-v2857(VarCurr)<->v2547(VarCurr)).
% 298.34/296.54  all VarNext (v803(VarNext,bitIndex54)<->v2847(VarNext,bitIndex54)).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2849(VarNext)-> (all B (range_63_0(B)-> (v2847(VarNext,B)<->v803(VarCurr,B)))))).
% 298.34/296.54  all VarNext (v2849(VarNext)-> (all B (range_63_0(B)-> (v2847(VarNext,B)<->v2377(VarNext,B))))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2849(VarNext)<->v2850(VarNext)&v2370(VarNext))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2850(VarNext)<->v2852(VarNext)&v2343(VarNext))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2852(VarNext)<->v2350(VarNext))).
% 298.34/296.54  all VarNext (v959(VarNext,bitIndex54)<->v2839(VarNext,bitIndex54)).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2840(VarNext)-> (all B (range_63_0(B)-> (v2839(VarNext,B)<->v959(VarCurr,B)))))).
% 298.34/296.54  all VarNext (v2840(VarNext)-> (all B (range_63_0(B)-> (v2839(VarNext,B)<->v2356(VarNext,B))))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2840(VarNext)<->v2841(VarNext))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2841(VarNext)<->v2843(VarNext)&v2343(VarNext))).
% 298.34/296.54  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2843(VarNext)<->v2350(VarNext))).
% 298.34/296.54  all VarCurr (v961(VarCurr,bitIndex54)<->v2339(VarCurr,bitIndex54)).
% 298.34/296.54  all VarCurr (-v2837(VarCurr)-> (v2327(VarCurr,bitIndex54)<->$F)).
% 298.34/296.54  all VarCurr (v2837(VarCurr)-> (v2327(VarCurr,bitIndex54)<->v2329(VarCurr))).
% 298.34/296.54  all VarCurr (v2837(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.34/296.55  all VarCurr (v963(VarCurr,bitIndex54)<->v2325(VarCurr,bitIndex54)).
% 298.34/296.55  all VarCurr (-v2835(VarCurr)-> (v965(VarCurr,bitIndex54)<->$F)).
% 298.34/296.55  all VarCurr (v2835(VarCurr)-> (v965(VarCurr,bitIndex54)<->v2201(VarCurr))).
% 298.34/296.55  all VarCurr (v2835(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.34/296.55  all VarCurr (v797(VarCurr,bitIndex55)<->v799(VarCurr,bitIndex55)).
% 298.34/296.55  all VarCurr (v799(VarCurr,bitIndex55)<->v2593(VarCurr,bitIndex55)).
% 298.34/296.55  all VarCurr (v801(VarCurr,bitIndex55)<->v2592(VarCurr,bitIndex55)).
% 298.34/296.55  all VarCurr (v2381(VarCurr,bitIndex55)<->v2383(VarCurr,bitIndex55)).
% 298.34/296.55  all VarCurr (v2383(VarCurr,bitIndex55)<->v2385(VarCurr,bitIndex55)).
% 298.34/296.55  all VarCurr (v2385(VarCurr,bitIndex55)<->v2387(VarCurr,bitIndex55)).
% 298.34/296.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2826(VarNext)-> (v2387(VarNext,bitIndex55)<->v2387(VarCurr,bitIndex55)))).
% 298.34/296.55  all VarNext (v2826(VarNext)-> (v2387(VarNext,bitIndex55)<->v2828(VarNext))).
% 298.34/296.55  all VarCurr (v2828(VarCurr)<->v2389(VarCurr)&v2829(VarCurr)).
% 298.34/296.55  all VarCurr (v2829(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex296))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex295))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex294))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex293))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex292))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex291))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex290))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex289))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex288))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex287))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex286))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex285))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex284))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex283))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex282))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex281))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex280))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex279))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex278))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex277))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex276))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex275))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex274))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex273))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex272))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex271))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex270))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex269))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex268))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex267))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex266))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex265))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex264))).
% 298.34/296.55  all VarCurr (v2826(VarCurr)<->v2827(VarCurr)&v2585(VarCurr)).
% 298.34/296.55  all VarCurr (-v2827(VarCurr)<->v2547(VarCurr)).
% 298.34/296.55  all VarNext (v803(VarNext,bitIndex55)<->v2817(VarNext,bitIndex55)).
% 298.34/296.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2819(VarNext)-> (all B (range_63_0(B)-> (v2817(VarNext,B)<->v803(VarCurr,B)))))).
% 298.34/296.55  all VarNext (v2819(VarNext)-> (all B (range_63_0(B)-> (v2817(VarNext,B)<->v2377(VarNext,B))))).
% 298.34/296.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2819(VarNext)<->v2820(VarNext)&v2370(VarNext))).
% 298.34/296.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2820(VarNext)<->v2822(VarNext)&v2343(VarNext))).
% 298.34/296.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2822(VarNext)<->v2350(VarNext))).
% 298.34/296.55  all VarNext (v959(VarNext,bitIndex55)<->v2809(VarNext,bitIndex55)).
% 298.34/296.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2810(VarNext)-> (all B (range_63_0(B)-> (v2809(VarNext,B)<->v959(VarCurr,B)))))).
% 298.34/296.55  all VarNext (v2810(VarNext)-> (all B (range_63_0(B)-> (v2809(VarNext,B)<->v2356(VarNext,B))))).
% 298.34/296.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2810(VarNext)<->v2811(VarNext))).
% 298.34/296.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2811(VarNext)<->v2813(VarNext)&v2343(VarNext))).
% 298.34/296.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2813(VarNext)<->v2350(VarNext))).
% 298.34/296.56  all VarCurr (v961(VarCurr,bitIndex55)<->v2339(VarCurr,bitIndex55)).
% 298.34/296.56  all VarCurr (-v2807(VarCurr)-> (v2327(VarCurr,bitIndex55)<->$F)).
% 298.34/296.56  all VarCurr (v2807(VarCurr)-> (v2327(VarCurr,bitIndex55)<->v2329(VarCurr))).
% 298.34/296.56  all VarCurr (v2807(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.34/296.56  all VarCurr (v963(VarCurr,bitIndex55)<->v2325(VarCurr,bitIndex55)).
% 298.34/296.56  all VarCurr (-v2805(VarCurr)-> (v965(VarCurr,bitIndex55)<->$F)).
% 298.34/296.56  all VarCurr (v2805(VarCurr)-> (v965(VarCurr,bitIndex55)<->v2201(VarCurr))).
% 298.34/296.56  all VarCurr (v2805(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$F)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.34/296.56  all VarCurr (v797(VarCurr,bitIndex56)<->v799(VarCurr,bitIndex56)).
% 298.34/296.56  all VarCurr (v799(VarCurr,bitIndex56)<->v2593(VarCurr,bitIndex56)).
% 298.34/296.56  all VarCurr (v801(VarCurr,bitIndex56)<->v2592(VarCurr,bitIndex56)).
% 298.34/296.56  all VarCurr (v2381(VarCurr,bitIndex56)<->v2383(VarCurr,bitIndex56)).
% 298.34/296.56  all VarCurr (v2383(VarCurr,bitIndex56)<->v2385(VarCurr,bitIndex56)).
% 298.34/296.56  all VarCurr (v2385(VarCurr,bitIndex56)<->v2387(VarCurr,bitIndex56)).
% 298.34/296.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2796(VarNext)-> (v2387(VarNext,bitIndex56)<->v2387(VarCurr,bitIndex56)))).
% 298.34/296.56  all VarNext (v2796(VarNext)-> (v2387(VarNext,bitIndex56)<->v2798(VarNext))).
% 298.34/296.56  all VarCurr (v2798(VarCurr)<->v2389(VarCurr)&v2799(VarCurr)).
% 298.34/296.56  all VarCurr (v2799(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex263))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex262))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex261))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex260))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex259))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex258))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex257))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex256))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex255))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex254))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex253))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex252))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex251))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex250))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex249))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex248))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex247))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex246))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex245))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex244))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex243))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex242))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex241))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex240))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex239))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex238))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex237))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex236))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex235))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex234))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex233))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex232))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex231))).
% 298.34/296.56  all VarCurr (v2796(VarCurr)<->v2797(VarCurr)&v2585(VarCurr)).
% 298.34/296.56  all VarCurr (-v2797(VarCurr)<->v2547(VarCurr)).
% 298.34/296.56  all VarNext (v803(VarNext,bitIndex56)<->v2787(VarNext,bitIndex56)).
% 298.34/296.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2789(VarNext)-> (all B (range_63_0(B)-> (v2787(VarNext,B)<->v803(VarCurr,B)))))).
% 298.37/296.57  all VarNext (v2789(VarNext)-> (all B (range_63_0(B)-> (v2787(VarNext,B)<->v2377(VarNext,B))))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2789(VarNext)<->v2790(VarNext)&v2370(VarNext))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2790(VarNext)<->v2792(VarNext)&v2343(VarNext))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2792(VarNext)<->v2350(VarNext))).
% 298.37/296.57  all VarNext (v959(VarNext,bitIndex56)<->v2779(VarNext,bitIndex56)).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2780(VarNext)-> (all B (range_63_0(B)-> (v2779(VarNext,B)<->v959(VarCurr,B)))))).
% 298.37/296.57  all VarNext (v2780(VarNext)-> (all B (range_63_0(B)-> (v2779(VarNext,B)<->v2356(VarNext,B))))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2780(VarNext)<->v2781(VarNext))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2781(VarNext)<->v2783(VarNext)&v2343(VarNext))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2783(VarNext)<->v2350(VarNext))).
% 298.37/296.57  all VarCurr (v961(VarCurr,bitIndex56)<->v2339(VarCurr,bitIndex56)).
% 298.37/296.57  all VarCurr (-v2777(VarCurr)-> (v2327(VarCurr,bitIndex56)<->$F)).
% 298.37/296.57  all VarCurr (v2777(VarCurr)-> (v2327(VarCurr,bitIndex56)<->v2329(VarCurr))).
% 298.37/296.57  all VarCurr (v2777(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.37/296.57  all VarCurr (v963(VarCurr,bitIndex56)<->v2325(VarCurr,bitIndex56)).
% 298.37/296.57  all VarCurr (-v2775(VarCurr)-> (v965(VarCurr,bitIndex56)<->$F)).
% 298.37/296.57  all VarCurr (v2775(VarCurr)-> (v965(VarCurr,bitIndex56)<->v2201(VarCurr))).
% 298.37/296.57  all VarCurr (v2775(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.37/296.57  all VarCurr (v797(VarCurr,bitIndex57)<->v799(VarCurr,bitIndex57)).
% 298.37/296.57  all VarCurr (v799(VarCurr,bitIndex57)<->v2593(VarCurr,bitIndex57)).
% 298.37/296.57  all VarCurr (v801(VarCurr,bitIndex57)<->v2592(VarCurr,bitIndex57)).
% 298.37/296.57  all VarCurr (v2381(VarCurr,bitIndex57)<->v2383(VarCurr,bitIndex57)).
% 298.37/296.57  all VarCurr (v2383(VarCurr,bitIndex57)<->v2385(VarCurr,bitIndex57)).
% 298.37/296.57  all VarCurr (v2385(VarCurr,bitIndex57)<->v2387(VarCurr,bitIndex57)).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2766(VarNext)-> (v2387(VarNext,bitIndex57)<->v2387(VarCurr,bitIndex57)))).
% 298.37/296.57  all VarNext (v2766(VarNext)-> (v2387(VarNext,bitIndex57)<->v2768(VarNext))).
% 298.37/296.57  all VarCurr (v2768(VarCurr)<->v2389(VarCurr)&v2769(VarCurr)).
% 298.37/296.57  all VarCurr (v2769(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex230))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex229))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex228))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex227))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex226))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex225))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex224))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex223))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex222))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex221))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex220))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex219))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex218))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex217))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex216))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex215))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex214))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex213))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex212))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex211))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex210))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex209))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex208))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex207))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex206))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex205))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex204))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex203))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex202))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex201))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex200))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex199))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex198))).
% 298.37/296.57  all VarCurr (v2766(VarCurr)<->v2767(VarCurr)&v2585(VarCurr)).
% 298.37/296.57  all VarCurr (-v2767(VarCurr)<->v2547(VarCurr)).
% 298.37/296.57  all VarNext (v803(VarNext,bitIndex57)<->v2757(VarNext,bitIndex57)).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2759(VarNext)-> (all B (range_63_0(B)-> (v2757(VarNext,B)<->v803(VarCurr,B)))))).
% 298.37/296.57  all VarNext (v2759(VarNext)-> (all B (range_63_0(B)-> (v2757(VarNext,B)<->v2377(VarNext,B))))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2759(VarNext)<->v2760(VarNext)&v2370(VarNext))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2760(VarNext)<->v2762(VarNext)&v2343(VarNext))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2762(VarNext)<->v2350(VarNext))).
% 298.37/296.57  all VarNext (v959(VarNext,bitIndex57)<->v2749(VarNext,bitIndex57)).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2750(VarNext)-> (all B (range_63_0(B)-> (v2749(VarNext,B)<->v959(VarCurr,B)))))).
% 298.37/296.57  all VarNext (v2750(VarNext)-> (all B (range_63_0(B)-> (v2749(VarNext,B)<->v2356(VarNext,B))))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2750(VarNext)<->v2751(VarNext))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2751(VarNext)<->v2753(VarNext)&v2343(VarNext))).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2753(VarNext)<->v2350(VarNext))).
% 298.37/296.57  all VarCurr (v961(VarCurr,bitIndex57)<->v2339(VarCurr,bitIndex57)).
% 298.37/296.57  all VarCurr (-v2747(VarCurr)-> (v2327(VarCurr,bitIndex57)<->$F)).
% 298.37/296.57  all VarCurr (v2747(VarCurr)-> (v2327(VarCurr,bitIndex57)<->v2329(VarCurr))).
% 298.37/296.57  all VarCurr (v2747(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.37/296.57  all VarCurr (v963(VarCurr,bitIndex57)<->v2325(VarCurr,bitIndex57)).
% 298.37/296.57  all VarCurr (-v2745(VarCurr)-> (v965(VarCurr,bitIndex57)<->$F)).
% 298.37/296.57  all VarCurr (v2745(VarCurr)-> (v965(VarCurr,bitIndex57)<->v2201(VarCurr))).
% 298.37/296.57  all VarCurr (v2745(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.37/296.57  all VarCurr (v797(VarCurr,bitIndex58)<->v799(VarCurr,bitIndex58)).
% 298.37/296.57  all VarCurr (v799(VarCurr,bitIndex58)<->v2593(VarCurr,bitIndex58)).
% 298.37/296.57  all VarCurr (v801(VarCurr,bitIndex58)<->v2592(VarCurr,bitIndex58)).
% 298.37/296.57  all VarCurr (v2381(VarCurr,bitIndex58)<->v2383(VarCurr,bitIndex58)).
% 298.37/296.57  all VarCurr (v2383(VarCurr,bitIndex58)<->v2385(VarCurr,bitIndex58)).
% 298.37/296.57  all VarCurr (v2385(VarCurr,bitIndex58)<->v2387(VarCurr,bitIndex58)).
% 298.37/296.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2736(VarNext)-> (v2387(VarNext,bitIndex58)<->v2387(VarCurr,bitIndex58)))).
% 298.37/296.57  all VarNext (v2736(VarNext)-> (v2387(VarNext,bitIndex58)<->v2738(VarNext))).
% 298.37/296.57  all VarCurr (v2738(VarCurr)<->v2389(VarCurr)&v2739(VarCurr)).
% 298.37/296.57  all VarCurr (v2739(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex197))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex196))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex195))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex194))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex193))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex192))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex191))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex190))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex189))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex188))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex187))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex186))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex185))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex184))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex183))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex182))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex181))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex180))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex179))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex178))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex177))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex176))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex175))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex174))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex173))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex172))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex171))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex170))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex169))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex168))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex167))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex166))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex165))).
% 298.37/296.58  all VarCurr (v2736(VarCurr)<->v2737(VarCurr)&v2585(VarCurr)).
% 298.37/296.58  all VarCurr (-v2737(VarCurr)<->v2547(VarCurr)).
% 298.37/296.58  all VarNext (v803(VarNext,bitIndex58)<->v2727(VarNext,bitIndex58)).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2729(VarNext)-> (all B (range_63_0(B)-> (v2727(VarNext,B)<->v803(VarCurr,B)))))).
% 298.37/296.58  all VarNext (v2729(VarNext)-> (all B (range_63_0(B)-> (v2727(VarNext,B)<->v2377(VarNext,B))))).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2729(VarNext)<->v2730(VarNext)&v2370(VarNext))).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2730(VarNext)<->v2732(VarNext)&v2343(VarNext))).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2732(VarNext)<->v2350(VarNext))).
% 298.37/296.58  all VarNext (v959(VarNext,bitIndex58)<->v2719(VarNext,bitIndex58)).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2720(VarNext)-> (all B (range_63_0(B)-> (v2719(VarNext,B)<->v959(VarCurr,B)))))).
% 298.37/296.58  all VarNext (v2720(VarNext)-> (all B (range_63_0(B)-> (v2719(VarNext,B)<->v2356(VarNext,B))))).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2720(VarNext)<->v2721(VarNext))).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2721(VarNext)<->v2723(VarNext)&v2343(VarNext))).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2723(VarNext)<->v2350(VarNext))).
% 298.37/296.58  all VarCurr (v961(VarCurr,bitIndex58)<->v2339(VarCurr,bitIndex58)).
% 298.37/296.58  all VarCurr (-v2717(VarCurr)-> (v2327(VarCurr,bitIndex58)<->$F)).
% 298.37/296.58  all VarCurr (v2717(VarCurr)-> (v2327(VarCurr,bitIndex58)<->v2329(VarCurr))).
% 298.37/296.58  all VarCurr (v2717(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.37/296.58  all VarCurr (v963(VarCurr,bitIndex58)<->v2325(VarCurr,bitIndex58)).
% 298.37/296.58  all VarCurr (-v2715(VarCurr)-> (v965(VarCurr,bitIndex58)<->$F)).
% 298.37/296.58  all VarCurr (v2715(VarCurr)-> (v965(VarCurr,bitIndex58)<->v2201(VarCurr))).
% 298.37/296.58  all VarCurr (v2715(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.37/296.58  all VarCurr (v797(VarCurr,bitIndex59)<->v799(VarCurr,bitIndex59)).
% 298.37/296.58  all VarCurr (v799(VarCurr,bitIndex59)<->v2593(VarCurr,bitIndex59)).
% 298.37/296.58  all VarCurr (v801(VarCurr,bitIndex59)<->v2592(VarCurr,bitIndex59)).
% 298.37/296.58  all VarCurr (v2381(VarCurr,bitIndex59)<->v2383(VarCurr,bitIndex59)).
% 298.37/296.58  all VarCurr (v2383(VarCurr,bitIndex59)<->v2385(VarCurr,bitIndex59)).
% 298.37/296.58  all VarCurr (v2385(VarCurr,bitIndex59)<->v2387(VarCurr,bitIndex59)).
% 298.37/296.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2706(VarNext)-> (v2387(VarNext,bitIndex59)<->v2387(VarCurr,bitIndex59)))).
% 298.37/296.58  all VarNext (v2706(VarNext)-> (v2387(VarNext,bitIndex59)<->v2708(VarNext))).
% 298.37/296.58  all VarCurr (v2708(VarCurr)<->v2389(VarCurr)&v2709(VarCurr)).
% 298.37/296.58  all VarCurr (v2709(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex164))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex163))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex162))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex161))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex160))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex159))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex158))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex157))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex156))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex155))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex154))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex153))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex152))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex151))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex150))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex149))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex148))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex147))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex146))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex145))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex144))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex143))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex142))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex141))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex140))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex139))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex138))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex137))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex136))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex135))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex134))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex133))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex132))).
% 298.37/296.59  all VarCurr (v2706(VarCurr)<->v2707(VarCurr)&v2585(VarCurr)).
% 298.37/296.59  all VarCurr (-v2707(VarCurr)<->v2547(VarCurr)).
% 298.37/296.59  all VarNext (v803(VarNext,bitIndex59)<->v2697(VarNext,bitIndex59)).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2699(VarNext)-> (all B (range_63_0(B)-> (v2697(VarNext,B)<->v803(VarCurr,B)))))).
% 298.37/296.59  all VarNext (v2699(VarNext)-> (all B (range_63_0(B)-> (v2697(VarNext,B)<->v2377(VarNext,B))))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2699(VarNext)<->v2700(VarNext)&v2370(VarNext))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2700(VarNext)<->v2702(VarNext)&v2343(VarNext))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2702(VarNext)<->v2350(VarNext))).
% 298.37/296.59  all VarNext (v959(VarNext,bitIndex59)<->v2689(VarNext,bitIndex59)).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2690(VarNext)-> (all B (range_63_0(B)-> (v2689(VarNext,B)<->v959(VarCurr,B)))))).
% 298.37/296.59  all VarNext (v2690(VarNext)-> (all B (range_63_0(B)-> (v2689(VarNext,B)<->v2356(VarNext,B))))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2690(VarNext)<->v2691(VarNext))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2691(VarNext)<->v2693(VarNext)&v2343(VarNext))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2693(VarNext)<->v2350(VarNext))).
% 298.37/296.59  all VarCurr (v961(VarCurr,bitIndex59)<->v2339(VarCurr,bitIndex59)).
% 298.37/296.59  all VarCurr (-v2687(VarCurr)-> (v2327(VarCurr,bitIndex59)<->$F)).
% 298.37/296.59  all VarCurr (v2687(VarCurr)-> (v2327(VarCurr,bitIndex59)<->v2329(VarCurr))).
% 298.37/296.59  all VarCurr (v2687(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.37/296.59  all VarCurr (v963(VarCurr,bitIndex59)<->v2325(VarCurr,bitIndex59)).
% 298.37/296.59  all VarCurr (-v2685(VarCurr)-> (v965(VarCurr,bitIndex59)<->$F)).
% 298.37/296.59  all VarCurr (v2685(VarCurr)-> (v965(VarCurr,bitIndex59)<->v2201(VarCurr))).
% 298.37/296.59  all VarCurr (v2685(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$F)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.37/296.59  all VarCurr (v797(VarCurr,bitIndex60)<->v799(VarCurr,bitIndex60)).
% 298.37/296.59  all VarCurr (v799(VarCurr,bitIndex60)<->v2593(VarCurr,bitIndex60)).
% 298.37/296.59  all VarCurr (v801(VarCurr,bitIndex60)<->v2592(VarCurr,bitIndex60)).
% 298.37/296.59  all VarCurr (v2381(VarCurr,bitIndex60)<->v2383(VarCurr,bitIndex60)).
% 298.37/296.59  all VarCurr (v2383(VarCurr,bitIndex60)<->v2385(VarCurr,bitIndex60)).
% 298.37/296.59  all VarCurr (v2385(VarCurr,bitIndex60)<->v2387(VarCurr,bitIndex60)).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2676(VarNext)-> (v2387(VarNext,bitIndex60)<->v2387(VarCurr,bitIndex60)))).
% 298.37/296.59  all VarNext (v2676(VarNext)-> (v2387(VarNext,bitIndex60)<->v2678(VarNext))).
% 298.37/296.59  all VarCurr (v2678(VarCurr)<->v2389(VarCurr)&v2679(VarCurr)).
% 298.37/296.59  all VarCurr (v2679(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex131))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex130))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex129))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex128))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex127))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex126))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex125))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex124))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex123))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex122))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex121))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex120))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex119))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex118))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex117))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex116))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex115))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex114))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex113))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex112))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex111))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex110))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex109))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex108))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex107))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex106))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex105))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex104))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex103))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex102))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex101))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex100))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex99))).
% 298.37/296.59  all VarCurr (v2676(VarCurr)<->v2677(VarCurr)&v2585(VarCurr)).
% 298.37/296.59  all VarCurr (-v2677(VarCurr)<->v2547(VarCurr)).
% 298.37/296.59  all VarNext (v803(VarNext,bitIndex60)<->v2667(VarNext,bitIndex60)).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2669(VarNext)-> (all B (range_63_0(B)-> (v2667(VarNext,B)<->v803(VarCurr,B)))))).
% 298.37/296.59  all VarNext (v2669(VarNext)-> (all B (range_63_0(B)-> (v2667(VarNext,B)<->v2377(VarNext,B))))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2669(VarNext)<->v2670(VarNext)&v2370(VarNext))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2670(VarNext)<->v2672(VarNext)&v2343(VarNext))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2672(VarNext)<->v2350(VarNext))).
% 298.37/296.59  all VarNext (v959(VarNext,bitIndex60)<->v2659(VarNext,bitIndex60)).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2660(VarNext)-> (all B (range_63_0(B)-> (v2659(VarNext,B)<->v959(VarCurr,B)))))).
% 298.37/296.59  all VarNext (v2660(VarNext)-> (all B (range_63_0(B)-> (v2659(VarNext,B)<->v2356(VarNext,B))))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2660(VarNext)<->v2661(VarNext))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2661(VarNext)<->v2663(VarNext)&v2343(VarNext))).
% 298.37/296.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2663(VarNext)<->v2350(VarNext))).
% 298.37/296.59  all VarCurr (v961(VarCurr,bitIndex60)<->v2339(VarCurr,bitIndex60)).
% 298.37/296.59  all VarCurr (-v2657(VarCurr)-> (v2327(VarCurr,bitIndex60)<->$F)).
% 298.37/296.59  all VarCurr (v2657(VarCurr)-> (v2327(VarCurr,bitIndex60)<->v2329(VarCurr))).
% 298.37/296.59  all VarCurr (v2657(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.37/296.59  all VarCurr (v963(VarCurr,bitIndex60)<->v2325(VarCurr,bitIndex60)).
% 298.37/296.59  all VarCurr (-v2655(VarCurr)-> (v965(VarCurr,bitIndex60)<->$F)).
% 298.37/296.60  all VarCurr (v2655(VarCurr)-> (v965(VarCurr,bitIndex60)<->v2201(VarCurr))).
% 298.37/296.60  all VarCurr (v2655(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.37/296.60  all VarCurr (v797(VarCurr,bitIndex61)<->v799(VarCurr,bitIndex61)).
% 298.37/296.60  all VarCurr (v799(VarCurr,bitIndex61)<->v2593(VarCurr,bitIndex61)).
% 298.37/296.60  all VarCurr (v801(VarCurr,bitIndex61)<->v2592(VarCurr,bitIndex61)).
% 298.37/296.60  all VarCurr (v2381(VarCurr,bitIndex61)<->v2383(VarCurr,bitIndex61)).
% 298.37/296.60  all VarCurr (v2383(VarCurr,bitIndex61)<->v2385(VarCurr,bitIndex61)).
% 298.37/296.60  all VarCurr (v2385(VarCurr,bitIndex61)<->v2387(VarCurr,bitIndex61)).
% 298.37/296.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2646(VarNext)-> (v2387(VarNext,bitIndex61)<->v2387(VarCurr,bitIndex61)))).
% 298.37/296.60  all VarNext (v2646(VarNext)-> (v2387(VarNext,bitIndex61)<->v2648(VarNext))).
% 298.37/296.60  all VarCurr (v2648(VarCurr)<->v2389(VarCurr)&v2649(VarCurr)).
% 298.37/296.60  all VarCurr (v2649(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex98))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex97))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex96))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex95))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex94))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex93))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex92))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex91))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex90))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex89))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex88))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex87))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex86))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex85))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex84))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex83))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex82))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex81))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex80))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex79))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex78))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex77))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex76))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex75))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex74))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex73))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex72))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex71))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex70))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex69))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex68))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex67))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex66))).
% 298.37/296.60  all VarCurr (v2646(VarCurr)<->v2647(VarCurr)&v2585(VarCurr)).
% 298.37/296.60  all VarCurr (-v2647(VarCurr)<->v2547(VarCurr)).
% 298.37/296.60  all VarNext (v803(VarNext,bitIndex61)<->v2637(VarNext,bitIndex61)).
% 298.37/296.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2639(VarNext)-> (all B (range_63_0(B)-> (v2637(VarNext,B)<->v803(VarCurr,B)))))).
% 298.37/296.60  all VarNext (v2639(VarNext)-> (all B (range_63_0(B)-> (v2637(VarNext,B)<->v2377(VarNext,B))))).
% 298.37/296.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2639(VarNext)<->v2640(VarNext)&v2370(VarNext))).
% 298.37/296.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2640(VarNext)<->v2642(VarNext)&v2343(VarNext))).
% 298.37/296.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2642(VarNext)<->v2350(VarNext))).
% 298.37/296.60  all VarNext (v959(VarNext,bitIndex61)<->v2629(VarNext,bitIndex61)).
% 298.37/296.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2630(VarNext)-> (all B (range_63_0(B)-> (v2629(VarNext,B)<->v959(VarCurr,B)))))).
% 298.37/296.60  all VarNext (v2630(VarNext)-> (all B (range_63_0(B)-> (v2629(VarNext,B)<->v2356(VarNext,B))))).
% 298.37/296.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2630(VarNext)<->v2631(VarNext))).
% 298.37/296.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2631(VarNext)<->v2633(VarNext)&v2343(VarNext))).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2633(VarNext)<->v2350(VarNext))).
% 298.37/296.61  all VarCurr (v961(VarCurr,bitIndex61)<->v2339(VarCurr,bitIndex61)).
% 298.37/296.61  all VarCurr (-v2627(VarCurr)-> (v2327(VarCurr,bitIndex61)<->$F)).
% 298.37/296.61  all VarCurr (v2627(VarCurr)-> (v2327(VarCurr,bitIndex61)<->v2329(VarCurr))).
% 298.37/296.61  all VarCurr (v2627(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.37/296.61  all VarCurr (v963(VarCurr,bitIndex61)<->v2325(VarCurr,bitIndex61)).
% 298.37/296.61  all VarCurr (-v2625(VarCurr)-> (v965(VarCurr,bitIndex61)<->$F)).
% 298.37/296.61  all VarCurr (v2625(VarCurr)-> (v965(VarCurr,bitIndex61)<->v2201(VarCurr))).
% 298.37/296.61  all VarCurr (v2625(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$F)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.37/296.61  all VarCurr (v797(VarCurr,bitIndex62)<->v799(VarCurr,bitIndex62)).
% 298.37/296.61  all VarCurr (v799(VarCurr,bitIndex62)<->v2593(VarCurr,bitIndex62)).
% 298.37/296.61  all VarCurr (v801(VarCurr,bitIndex62)<->v2592(VarCurr,bitIndex62)).
% 298.37/296.61  all VarCurr (v2381(VarCurr,bitIndex62)<->v2383(VarCurr,bitIndex62)).
% 298.37/296.61  all VarCurr (v2383(VarCurr,bitIndex62)<->v2385(VarCurr,bitIndex62)).
% 298.37/296.61  all VarCurr (v2385(VarCurr,bitIndex62)<->v2387(VarCurr,bitIndex62)).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2616(VarNext)-> (v2387(VarNext,bitIndex62)<->v2387(VarCurr,bitIndex62)))).
% 298.37/296.61  all VarNext (v2616(VarNext)-> (v2387(VarNext,bitIndex62)<->v2618(VarNext))).
% 298.37/296.61  all VarCurr (v2618(VarCurr)<->v2389(VarCurr)&v2619(VarCurr)).
% 298.37/296.61  all VarCurr (v2619(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex65))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex64))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex63))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex62))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex61))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex60))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex59))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex58))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex57))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex56))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex55))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex54))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex53))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex52))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex51))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex50))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex49))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex48))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex47))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex46))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex45))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex44))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex43))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex42))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex41))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex40))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex39))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex38))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex37))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex36))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex35))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex34))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex33))).
% 298.37/296.61  all VarCurr (v2616(VarCurr)<->v2617(VarCurr)&v2585(VarCurr)).
% 298.37/296.61  all VarCurr (-v2617(VarCurr)<->v2547(VarCurr)).
% 298.37/296.61  all VarNext (v803(VarNext,bitIndex62)<->v2607(VarNext,bitIndex62)).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2609(VarNext)-> (all B (range_63_0(B)-> (v2607(VarNext,B)<->v803(VarCurr,B)))))).
% 298.37/296.61  all VarNext (v2609(VarNext)-> (all B (range_63_0(B)-> (v2607(VarNext,B)<->v2377(VarNext,B))))).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2609(VarNext)<->v2610(VarNext)&v2370(VarNext))).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2610(VarNext)<->v2612(VarNext)&v2343(VarNext))).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2612(VarNext)<->v2350(VarNext))).
% 298.37/296.61  all VarNext (v959(VarNext,bitIndex62)<->v2599(VarNext,bitIndex62)).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2600(VarNext)-> (all B (range_63_0(B)-> (v2599(VarNext,B)<->v959(VarCurr,B)))))).
% 298.37/296.61  all VarNext (v2600(VarNext)-> (all B (range_63_0(B)-> (v2599(VarNext,B)<->v2356(VarNext,B))))).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2600(VarNext)<->v2601(VarNext))).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2601(VarNext)<->v2603(VarNext)&v2343(VarNext))).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2603(VarNext)<->v2350(VarNext))).
% 298.37/296.61  all VarCurr (v961(VarCurr,bitIndex62)<->v2339(VarCurr,bitIndex62)).
% 298.37/296.61  all VarCurr (-v2597(VarCurr)-> (v2327(VarCurr,bitIndex62)<->$F)).
% 298.37/296.61  all VarCurr (v2597(VarCurr)-> (v2327(VarCurr,bitIndex62)<->v2329(VarCurr))).
% 298.37/296.61  all VarCurr (v2597(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.37/296.61  all VarCurr (v963(VarCurr,bitIndex62)<->v2325(VarCurr,bitIndex62)).
% 298.37/296.61  all VarCurr (-v2595(VarCurr)-> (v965(VarCurr,bitIndex62)<->$F)).
% 298.37/296.61  all VarCurr (v2595(VarCurr)-> (v965(VarCurr,bitIndex62)<->v2201(VarCurr))).
% 298.37/296.61  all VarCurr (v2595(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$F)).
% 298.37/296.61  all VarCurr (v797(VarCurr,bitIndex63)<->v799(VarCurr,bitIndex63)).
% 298.37/296.61  all VarCurr (v799(VarCurr,bitIndex63)<->v2593(VarCurr,bitIndex63)).
% 298.37/296.61  all VarCurr B (range_63_0(B)-> (v2593(VarCurr,B)<->v801(VarCurr,B)&v959(VarCurr,B))).
% 298.37/296.61  all VarCurr (v801(VarCurr,bitIndex63)<->v2592(VarCurr,bitIndex63)).
% 298.37/296.61  all VarCurr B (range_63_0(B)-> (v2592(VarCurr,B)<->v803(VarCurr,B)&v2381(VarCurr,B))).
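Unlike the per-bit equivalences, the two range_63_0 formulas above define whole words at once: v2592 is the bitwise AND of v803 and v2381, and v2593 ANDs v801 (which equals v2592) with v959, so v797 = v799 reduces bitwise to (v803 & v2381) & v959. A minimal sketch with Python integers standing in for the 64-bit vectors (names illustrative):

```python
# Sketch of the two word-level AND stages: v2592 = v803 & v2381 and
# v2593 = v801 & v959, with v801 = v2592, so v797 = (v803 & v2381) & v959.
# Python ints stand in for the range_63_0 bit vectors.

MASK64 = (1 << 64) - 1

def v2592_of(v803, v2381):
    return v803 & v2381 & MASK64

def v2593_of(v801, v959):
    return v801 & v959 & MASK64

def v797_of(v803, v2381, v959):
    return v2593_of(v2592_of(v803, v2381), v959)
```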
% 298.37/296.61  all VarCurr (v2381(VarCurr,bitIndex63)<->v2383(VarCurr,bitIndex63)).
% 298.37/296.61  all VarCurr (v2383(VarCurr,bitIndex63)<->v2385(VarCurr,bitIndex63)).
% 298.37/296.61  all VarCurr (v2385(VarCurr,bitIndex63)<->v2387(VarCurr,bitIndex63)).
% 298.37/296.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2583(VarNext)-> (v2387(VarNext,bitIndex63)<->v2387(VarCurr,bitIndex63)))).
% 298.37/296.61  all VarNext (v2583(VarNext)-> (v2387(VarNext,bitIndex63)<->v2586(VarNext))).
% 298.37/296.61  all VarCurr (v2586(VarCurr)<->v2389(VarCurr)&v2587(VarCurr)).
% 298.37/296.61  all VarCurr (v2587(VarCurr)<-> (v2409(VarCurr,bitIndex32)<->v2545(VarCurr,bitIndex32))& (v2409(VarCurr,bitIndex31)<->v2545(VarCurr,bitIndex31))& (v2409(VarCurr,bitIndex30)<->v2545(VarCurr,bitIndex30))& (v2409(VarCurr,bitIndex29)<->v2545(VarCurr,bitIndex29))& (v2409(VarCurr,bitIndex28)<->v2545(VarCurr,bitIndex28))& (v2409(VarCurr,bitIndex27)<->v2545(VarCurr,bitIndex27))& (v2409(VarCurr,bitIndex26)<->v2545(VarCurr,bitIndex26))& (v2409(VarCurr,bitIndex25)<->v2545(VarCurr,bitIndex25))& (v2409(VarCurr,bitIndex24)<->v2545(VarCurr,bitIndex24))& (v2409(VarCurr,bitIndex23)<->v2545(VarCurr,bitIndex23))& (v2409(VarCurr,bitIndex22)<->v2545(VarCurr,bitIndex22))& (v2409(VarCurr,bitIndex21)<->v2545(VarCurr,bitIndex21))& (v2409(VarCurr,bitIndex20)<->v2545(VarCurr,bitIndex20))& (v2409(VarCurr,bitIndex19)<->v2545(VarCurr,bitIndex19))& (v2409(VarCurr,bitIndex18)<->v2545(VarCurr,bitIndex18))& (v2409(VarCurr,bitIndex17)<->v2545(VarCurr,bitIndex17))& (v2409(VarCurr,bitIndex16)<->v2545(VarCurr,bitIndex16))& (v2409(VarCurr,bitIndex15)<->v2545(VarCurr,bitIndex15))& (v2409(VarCurr,bitIndex14)<->v2545(VarCurr,bitIndex14))& (v2409(VarCurr,bitIndex13)<->v2545(VarCurr,bitIndex13))& (v2409(VarCurr,bitIndex12)<->v2545(VarCurr,bitIndex12))& (v2409(VarCurr,bitIndex11)<->v2545(VarCurr,bitIndex11))& (v2409(VarCurr,bitIndex10)<->v2545(VarCurr,bitIndex10))& (v2409(VarCurr,bitIndex9)<->v2545(VarCurr,bitIndex9))& (v2409(VarCurr,bitIndex8)<->v2545(VarCurr,bitIndex8))& (v2409(VarCurr,bitIndex7)<->v2545(VarCurr,bitIndex7))& (v2409(VarCurr,bitIndex6)<->v2545(VarCurr,bitIndex6))& (v2409(VarCurr,bitIndex5)<->v2545(VarCurr,bitIndex5))& (v2409(VarCurr,bitIndex4)<->v2545(VarCurr,bitIndex4))& (v2409(VarCurr,bitIndex3)<->v2545(VarCurr,bitIndex3))& (v2409(VarCurr,bitIndex2)<->v2545(VarCurr,bitIndex2))& (v2409(VarCurr,bitIndex1)<->v2545(VarCurr,bitIndex1))& (v2409(VarCurr,bitIndex0)<->v2545(VarCurr,bitIndex0))).
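The unit clauses that follow fix the initial value of v2545 at constB0 one bit at a time; each clause in this stretch, from bitIndex2111 down through bitIndex1966, is $F, so this region of the vector starts out zero. A minimal sketch of folding such a run back into one constant, assuming the clauses simply tabulate an initial value:

```python
# Sketch of folding the per-bit initial-state clauses below into one
# constant: each listed index (bitIndex2111 down to bitIndex1966 here)
# is $F, i.e. that slice of v2545 is all zeros at constB0.

init_bits = {i: False for i in range(1966, 2112)}
assert not any(init_bits.values())
```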
% 298.37/296.62  v2545(constB0,bitIndex2111)<->$F.
% 298.37/296.62  v2545(constB0,bitIndex2110)<->$F.
% 298.37/296.62  v2545(constB0,bitIndex2109)<->$F.
% 298.37/296.62  v2545(constB0,bitIndex2108)<->$F.
% 298.37/296.62  v2545(constB0,bitIndex2107)<->$F.
% 298.37/296.62  v2545(constB0,bitIndex2106)<->$F.
% 298.37/296.62  v2545(constB0,bitIndexN)<->$F for each N from 2105 down to 846.
% 298.48/296.67  v2545(constB0,bitIndex845)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex844)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex843)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex842)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex841)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex840)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex839)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex838)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex837)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex836)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex835)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex834)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex833)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex832)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex831)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex830)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex829)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex828)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex827)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex826)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex825)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex824)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex823)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex822)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex821)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex820)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex819)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex818)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex817)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex816)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex815)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex814)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex813)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex812)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex811)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex810)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex809)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex808)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex807)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex806)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex805)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex804)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex803)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex802)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex801)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex800)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex799)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex798)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex797)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex796)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex795)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex794)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex793)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex792)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex791)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex790)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex789)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex788)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex787)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex786)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex785)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex784)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex783)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex782)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex781)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex780)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex779)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex778)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex777)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex776)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex775)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex774)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex773)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex772)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex771)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex770)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex769)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex768)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex767)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex766)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex765)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex764)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex763)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex762)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex761)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex760)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex759)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex758)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex757)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex756)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex755)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex754)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex753)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex752)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex751)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex750)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex749)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex748)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex747)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex746)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex745)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex744)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex743)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex742)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex741)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex740)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex739)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex738)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex737)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex736)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex735)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex734)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex733)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex732)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex731)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex730)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex729)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex728)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex727)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex726)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex725)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex724)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex723)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex722)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex721)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex720)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex719)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex718)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex717)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex716)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex715)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex714)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex713)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex712)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex711)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex710)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex709)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex708)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex707)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex706)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex705)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex704)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex703)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex702)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex701)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex700)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex699)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex698)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex697)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex696)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex695)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex694)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex693)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex692)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex691)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex690)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex689)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex688)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex687)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex686)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex685)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex684)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex683)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex682)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex681)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex680)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex679)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex678)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex677)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex676)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex675)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex674)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex673)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex672)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex671)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex670)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex669)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex668)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex667)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex666)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex665)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex664)<->$F.
% 298.48/296.67  v2545(constB0,bitIndex663)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex662)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex661)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex660)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex659)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex658)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex657)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex656)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex655)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex654)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex653)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex652)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex651)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex650)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex649)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex648)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex647)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex646)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex645)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex644)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex643)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex642)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex641)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex640)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex639)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex638)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex637)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex636)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex635)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex634)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex633)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex632)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex631)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex630)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex629)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex628)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex627)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex626)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex625)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex624)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex623)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex622)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex621)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex620)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex619)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex618)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex617)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex616)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex615)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex614)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex613)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex612)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex611)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex610)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex609)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex608)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex607)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex606)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex605)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex604)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex603)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex602)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex601)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex600)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex599)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex598)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex597)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex596)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex595)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex594)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex593)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex592)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex591)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex590)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex589)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex588)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex587)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex586)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex585)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex584)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex583)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex582)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex581)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex580)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex579)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex578)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex577)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex576)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex575)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex574)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex573)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex572)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex571)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex570)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex569)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex568)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex567)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex566)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex565)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex564)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex563)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex562)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex561)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex560)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex559)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex558)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex557)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex556)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex555)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex554)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex553)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex552)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex551)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex550)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex549)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex548)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex547)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex546)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex545)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex544)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex543)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex542)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex541)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex540)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex539)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex538)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex537)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex536)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex535)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex534)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex533)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex532)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex531)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex530)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex529)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex528)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex527)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex526)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex525)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex524)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex523)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex522)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex521)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex520)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex519)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex518)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex517)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex516)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex515)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex514)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex513)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex512)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex511)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex510)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex509)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex508)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex507)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex506)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex505)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex504)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex503)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex502)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex501)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex500)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex499)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex498)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex497)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex496)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex495)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex494)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex493)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex492)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex491)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex490)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex489)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex488)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex487)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex486)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex485)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex484)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex483)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex482)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex481)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex480)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex479)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex478)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex477)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex476)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex475)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex474)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex473)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex472)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex471)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex470)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex469)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex468)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex467)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex466)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex465)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex464)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex463)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex462)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex461)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex460)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex459)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex458)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex457)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex456)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex455)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex454)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex453)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex452)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex451)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex450)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex449)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex448)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex447)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex446)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex445)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex444)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex443)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex442)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex441)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex440)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex439)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex438)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex437)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex436)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex435)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex434)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex433)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex432)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex431)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex430)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex429)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex428)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex427)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex426)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex425)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex424)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex423)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex422)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex421)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex420)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex419)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex418)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex417)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex416)<->$F.
% 298.48/296.68  v2545(constB0,bitIndex415)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex414)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex413)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex412)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex411)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex410)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex409)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex408)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex407)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex406)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex405)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex404)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex403)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex402)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex401)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex400)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex399)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex398)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex397)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex396)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex395)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex394)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex393)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex392)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex391)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex390)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex389)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex388)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex387)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex386)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex385)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex384)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex383)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex382)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex381)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex380)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex379)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex378)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex377)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex376)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex375)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex374)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex373)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex372)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex371)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex370)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex369)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex368)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex367)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex366)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex365)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex364)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex363)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex362)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex361)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex360)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex359)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex358)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex357)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex356)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex355)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex354)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex353)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex352)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex351)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex350)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex349)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex348)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex347)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex346)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex345)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex344)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex343)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex342)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex341)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex340)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex339)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex338)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex337)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex336)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex335)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex334)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex333)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex332)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex331)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex330)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex329)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex328)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex327)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex326)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex325)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex324)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex323)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex322)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex321)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex320)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex319)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex318)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex317)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex316)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex315)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex314)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex313)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex312)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex311)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex310)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex309)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex308)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex307)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex306)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex305)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex304)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex303)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex302)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex301)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex300)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex299)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex298)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex297)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex296)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex295)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex294)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex293)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex292)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex291)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex290)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex289)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex288)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex287)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex286)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex285)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex284)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex283)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex282)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex281)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex280)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex279)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex278)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex277)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex276)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex275)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex274)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex273)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex272)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex271)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex270)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex269)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex268)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex267)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex266)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex265)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex264)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex263)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex262)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex261)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex260)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex259)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex258)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex257)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex256)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex255)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex254)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex253)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex252)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex251)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex250)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex249)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex248)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex247)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex246)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex245)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex244)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex243)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex242)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex241)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex240)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex239)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex238)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex237)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex236)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex235)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex234)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex233)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex232)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex231)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex230)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex229)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex228)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex227)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex226)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex225)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex224)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex223)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex222)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex221)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex220)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex219)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex218)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex217)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex216)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex215)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex214)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex213)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex212)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex211)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex210)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex209)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex208)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex207)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex206)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex205)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex204)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex203)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex202)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex201)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex200)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex199)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex198)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex197)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex196)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex195)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex194)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex193)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex192)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex191)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex190)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex189)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex188)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex187)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex186)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex185)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex184)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex183)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex182)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex181)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex180)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex179)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex178)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex177)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex176)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex175)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex174)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex173)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex172)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex171)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex170)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex169)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex168)<->$F.
% 298.48/296.69  v2545(constB0,bitIndex167)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex166)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex165)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex164)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex163)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex162)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex161)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex160)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex159)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex158)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex157)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex156)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex155)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex154)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex153)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex152)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex151)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex150)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex149)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex148)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex147)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex146)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex145)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex144)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex143)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex142)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex141)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex140)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex139)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex138)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex137)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex136)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex135)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex134)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex133)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex132)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex131)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex130)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex129)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex128)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex127)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex126)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex125)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex124)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex123)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex122)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex121)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex120)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex119)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex118)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex117)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex116)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex115)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex114)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex113)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex112)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex111)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex110)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex109)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex108)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex107)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex106)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex105)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex104)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex103)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex102)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex101)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex100)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex99)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex98)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex97)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex96)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex95)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex94)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex93)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex92)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex91)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex90)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex89)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex88)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex87)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex86)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex85)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex84)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex83)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex82)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex81)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex80)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex79)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex78)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex77)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex76)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex75)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex74)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex73)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex72)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex71)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex70)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex69)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex68)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex67)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex66)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex65)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex64)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex63)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex62)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex61)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex60)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex59)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex58)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex57)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex56)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex55)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex54)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex53)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex52)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex51)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex50)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex49)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex48)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex47)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex46)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex45)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex44)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex43)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex42)<->$F.
% 298.48/296.70  v2545(constB0,bitIndex41)<->$F.
% 298.48/296.71  v2545(constB0,bitIndex40)<->$F.
% 298.48/296.71  v2545(constB0,bitIndex39)<->$F.
% 298.48/296.71  v2545(constB0,bitIndex38)<->$F.
% 298.48/296.71  v2545(constB0,bitIndex37)<->$F.
% 298.48/296.71  v2545(constB0,bitIndex36)<->$F.
% 298.48/296.71  v2545(constB0,bitIndex35)<->$F.
% 298.48/296.71  v2545(constB0,bitIndex34)<->$F.
% 298.48/296.71  v2545(constB0,bitIndex33)<->$F.
% 298.48/296.71  all B (range_32_0(B)-> (v2545(constB0,B)<->$F)).
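Taken together, the ground literals above and the quantified range_32_0 clause pin every bit of v2545 at the initial state constB0 to $F: bits 989 down to 33 one literal at a time, then bits 32..0 in a single quantified step. Reading v2545 as a 990-bit word is inferred from the listed indices rather than stated by the log; under that assumption, a minimal Python sketch reproduces the same initial-state facts:

    # Sketch only: models the zero-initialisation axioms above. The 990-bit
    # width is an assumption read off the bit indices (0..989) in the log.
    WIDTH = 990

    def zero_init(signal, state, width=WIDTH):
        """Assert signal(state, b) <-> $F for every bit b, as the axioms do."""
        for b in range(width):
            signal[(state, b)] = False

    v2545 = {}
    zero_init(v2545, "constB0")

    # Spot-check both encoding styles: an explicit literal (bit 989) and a
    # bit covered by the quantified range_32_0 clause (bit 17).
    assert v2545[("constB0", 989)] is False
    assert v2545[("constB0", 17)] is False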
% 298.48/296.71  all VarCurr (v2583(VarCurr)<->v2584(VarCurr)&v2585(VarCurr)).
% 298.48/296.71  all VarCurr (v2585(VarCurr)<->v2558(VarCurr)&v2580(VarCurr)).
% 298.48/296.71  all VarCurr (-v2584(VarCurr)<->v2547(VarCurr)).
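The three clauses just above read naturally as a small gate cluster: v2585 is the AND of v2558 and v2580, v2584 is the negation of v2547, and v2583 ANDs the two results. A boolean sketch of that reading (illustrative, with the axioms echoed as comments):

    def v2583(v2547, v2558, v2580):
        """An AND tree with one inverted input, per the three clauses above."""
        v2585 = v2558 and v2580   # v2585 <-> v2558 & v2580
        v2584 = not v2547         # -v2584 <-> v2547
        return v2584 and v2585    # v2583 <-> v2584 & v2585

    assert v2583(v2547=False, v2558=True, v2580=True) is True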
% 298.48/296.71  v2387(constB0,bitIndex63)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex62)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex61)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex60)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex59)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex58)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex57)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex56)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex55)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex54)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex53)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex52)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex51)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex50)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex49)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex48)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex47)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex46)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex45)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex44)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex43)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex42)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex41)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex40)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex39)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex38)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex37)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex36)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex35)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex34)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex33)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex32)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex31)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex30)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex29)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex28)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex27)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex26)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex25)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex24)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex23)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex22)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex21)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex20)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex19)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex18)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex17)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex16)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex15)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex14)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex13)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex12)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex11)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex10)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex9)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex8)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex7)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex6)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex5)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex4)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex3)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex2)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex1)<->$F.
% 298.48/296.71  v2387(constB0,bitIndex0)<->$F.
% 298.48/296.71  all VarCurr (v2580(VarCurr)<->v2566(VarCurr)).
% 298.48/296.71  all VarCurr (v2558(VarCurr)<->v2560(VarCurr)).
% 298.48/296.71  all VarCurr (v2560(VarCurr)<->v2562(VarCurr)).
% 298.48/296.71  all VarCurr (v2562(VarCurr)<->v2578(VarCurr)|v2572(VarCurr)).
% 298.48/296.71  all VarCurr (v2578(VarCurr)<->v2564(VarCurr)&v2570(VarCurr)).
% 298.48/296.71  v2570(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2572(VarCurr)<->v2574(VarCurr)).
% 298.48/296.71  all VarCurr (v2574(VarCurr)<->v2576(VarCurr)).
% 298.48/296.71  all VarCurr (v2576(VarCurr)<->v67(VarCurr)).
% 298.48/296.71  all VarCurr (v2564(VarCurr)<->v2566(VarCurr)).
% 298.48/296.71  all VarCurr (v2566(VarCurr)<->v2568(VarCurr)).
% 298.48/296.71  all VarCurr (v2568(VarCurr)<->v19(VarCurr)).
% 298.48/296.71  all VarCurr (v2547(VarCurr)<->v2549(VarCurr)).
% 298.48/296.71  all VarCurr (v2549(VarCurr)<->v2401(VarCurr)|v2551(VarCurr)).
% 298.48/296.71  all VarCurr (v2551(VarCurr)<->v2553(VarCurr)).
% 298.48/296.71  all VarCurr (v2553(VarCurr)<->v2555(VarCurr)).
% 298.48/296.71  v2555(constB0)<->$F.
% 298.48/296.71  all VarCurr B (range_32_0(B)-> (v2409(VarCurr,B)<->v2411(VarCurr,B))).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex32)<->v2413(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex31)<->v2417(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex30)<->v2421(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex29)<->v2425(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex28)<->v2429(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex27)<->v2433(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex26)<->v2437(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex25)<->v2441(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex24)<->v2445(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex23)<->v2449(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex22)<->v2453(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex21)<->v2457(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex20)<->v2461(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex19)<->v2465(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex18)<->v2469(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex17)<->v2473(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex16)<->v2477(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex15)<->v2481(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex14)<->v2485(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex13)<->v2489(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex12)<->v2493(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex11)<->v2497(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex10)<->v2501(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex9)<->v2505(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex8)<->v2509(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex7)<->v2513(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex6)<->v2517(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex5)<->v2521(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex4)<->v2525(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex3)<->v2529(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex2)<->v2533(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex1)<->v2537(VarCurr)).
% 298.48/296.71  all VarCurr (v2411(VarCurr,bitIndex0)<->v2541(VarCurr)).
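The bitIndex32 down to bitIndex0 biconditionals above splice 33 single-bit signals into one 33-bit bus: bit k of v2411 is driven by v(2541 - 4k), so the drivers run v2413, v2417, ..., v2541 in steps of four. A short sketch rebuilding the same wiring from that numbering pattern (the pattern is read off the log; the helper and its environment are illustrative):

    # Bit k of the bus v2411 is driven by v(2541 - 4*k), for k = 0..32.
    bus_sources = {k: f"v{2541 - 4 * k}" for k in range(33)}
    assert bus_sources[32] == "v2413" and bus_sources[0] == "v2541"

    def v2411_bit(scalar_values, state, k):
        """Bit k of the bus delegates to its scalar driver, mirroring the
        per-bit biconditionals above."""
        return scalar_values[(bus_sources[k], state)]

    # Every driver is false at constB0, as follows from the alias-and-init
    # pairs below (e.g. v2541 <-> v2543 together with v2543(constB0) <-> $F),
    # so the whole bus reads false in the initial state:
    env = {(name, "constB0"): False for name in bus_sources.values()}
    assert all(v2411_bit(env, "constB0", k) is False for k in range(33))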
% 298.48/296.71  all VarCurr (v2541(VarCurr)<->v2543(VarCurr)).
% 298.48/296.71  v2543(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2537(VarCurr)<->v2539(VarCurr)).
% 298.48/296.71  v2539(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2533(VarCurr)<->v2535(VarCurr)).
% 298.48/296.71  v2535(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2529(VarCurr)<->v2531(VarCurr)).
% 298.48/296.71  v2531(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2525(VarCurr)<->v2527(VarCurr)).
% 298.48/296.71  v2527(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2521(VarCurr)<->v2523(VarCurr)).
% 298.48/296.71  v2523(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2517(VarCurr)<->v2519(VarCurr)).
% 298.48/296.71  v2519(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2513(VarCurr)<->v2515(VarCurr)).
% 298.48/296.71  v2515(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2509(VarCurr)<->v2511(VarCurr)).
% 298.48/296.71  v2511(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2505(VarCurr)<->v2507(VarCurr)).
% 298.48/296.71  v2507(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2501(VarCurr)<->v2503(VarCurr)).
% 298.48/296.71  v2503(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2497(VarCurr)<->v2499(VarCurr)).
% 298.48/296.71  v2499(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2493(VarCurr)<->v2495(VarCurr)).
% 298.48/296.71  v2495(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2489(VarCurr)<->v2491(VarCurr)).
% 298.48/296.71  v2491(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2485(VarCurr)<->v2487(VarCurr)).
% 298.48/296.71  v2487(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2481(VarCurr)<->v2483(VarCurr)).
% 298.48/296.71  v2483(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2477(VarCurr)<->v2479(VarCurr)).
% 298.48/296.71  v2479(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2473(VarCurr)<->v2475(VarCurr)).
% 298.48/296.71  v2475(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2469(VarCurr)<->v2471(VarCurr)).
% 298.48/296.71  v2471(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2465(VarCurr)<->v2467(VarCurr)).
% 298.48/296.71  v2467(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2461(VarCurr)<->v2463(VarCurr)).
% 298.48/296.71  v2463(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2457(VarCurr)<->v2459(VarCurr)).
% 298.48/296.71  v2459(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2453(VarCurr)<->v2455(VarCurr)).
% 298.48/296.71  v2455(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2449(VarCurr)<->v2451(VarCurr)).
% 298.48/296.71  v2451(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2445(VarCurr)<->v2447(VarCurr)).
% 298.48/296.71  v2447(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2441(VarCurr)<->v2443(VarCurr)).
% 298.48/296.71  v2443(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2437(VarCurr)<->v2439(VarCurr)).
% 298.48/296.71  v2439(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2433(VarCurr)<->v2435(VarCurr)).
% 298.48/296.71  v2435(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2429(VarCurr)<->v2431(VarCurr)).
% 298.48/296.71  v2431(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2425(VarCurr)<->v2427(VarCurr)).
% 298.48/296.71  v2427(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2421(VarCurr)<->v2423(VarCurr)).
% 298.48/296.71  v2423(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2417(VarCurr)<->v2419(VarCurr)).
% 298.48/296.71  v2419(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2413(VarCurr)<->v2415(VarCurr)).
% 298.48/296.71  v2415(constB0)<->$F.
% 298.48/296.71  all VarCurr (v2389(VarCurr)<->v2391(VarCurr)).
% 298.48/296.71  all VarCurr (v2391(VarCurr)<->v2393(VarCurr)&v2399(VarCurr)).
% 298.48/296.71  all VarCurr (-v2399(VarCurr)<->v2401(VarCurr)).
% 298.48/296.71  all VarCurr (v2401(VarCurr)<->v2403(VarCurr)).
% 298.48/296.71  all VarCurr (v2403(VarCurr)<->v2405(VarCurr)).
% 298.48/296.71  all VarCurr (v2405(VarCurr)<->v612(VarCurr)).
% 298.48/296.71  all VarCurr (v2393(VarCurr)<->v2395(VarCurr)).
% 298.48/296.71  all VarCurr (v2395(VarCurr)<->v2397(VarCurr)).
% 298.48/296.71  v2397(constB0)<->$F.
% 298.48/296.71  all VarNext (v803(VarNext,bitIndex63)<->v2361(VarNext,bitIndex63)).
% 298.48/296.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2362(VarNext)-> (all B (range_63_0(B)-> (v2361(VarNext,B)<->v803(VarCurr,B)))))).
% 298.48/296.71  all VarNext (v2362(VarNext)-> (all B (range_63_0(B)-> (v2361(VarNext,B)<->v2377(VarNext,B))))).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_63_0(B)-> (v2377(VarNext,B)<->v2375(VarCurr,B))))).
% 298.48/296.72  all VarCurr (-v2371(VarCurr)-> (all B (range_63_0(B)-> (v2375(VarCurr,B)<->v959(VarCurr,B))))).
% 298.48/296.72  all VarCurr (v2371(VarCurr)-> (all B (range_63_0(B)-> (v2375(VarCurr,B)<->$F)))).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2362(VarNext)<->v2363(VarNext)&v2370(VarNext))).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2370(VarNext)<->v2368(VarCurr))).
% 298.48/296.72  all VarCurr (v2368(VarCurr)<->v2371(VarCurr)|v2372(VarCurr)).
% 298.48/296.72  all VarCurr (v2372(VarCurr)<->v2373(VarCurr)&v2374(VarCurr)).
% 298.48/296.72  all VarCurr (-v2374(VarCurr)<->v2371(VarCurr)).
% 298.48/296.72  all VarCurr (-v2373(VarCurr)<->v807(VarCurr)).
% 298.48/296.72  all VarCurr (-v2371(VarCurr)<->v805(VarCurr)).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2363(VarNext)<->v2364(VarNext)&v2343(VarNext))).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2364(VarNext)<->v2350(VarNext))).
% 298.48/296.72  all B (range_63_0(B)-> (v803(constB0,B)<->$F)).
% 298.48/296.72  all VarNext (v959(VarNext,bitIndex63)<->v2345(VarNext,bitIndex63)).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2346(VarNext)-> (all B (range_63_0(B)-> (v2345(VarNext,B)<->v959(VarCurr,B)))))).
% 298.48/296.72  all VarNext (v2346(VarNext)-> (all B (range_63_0(B)-> (v2345(VarNext,B)<->v2356(VarNext,B))))).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_63_0(B)-> (v2356(VarNext,B)<->v2354(VarCurr,B))))).
% 298.48/296.72  all VarCurr (-v2357(VarCurr)-> (all B (range_63_0(B)-> (v2354(VarCurr,B)<->v961(VarCurr,B))))).
% 298.48/296.72  all VarCurr (v2357(VarCurr)-> (all B (range_63_0(B)-> (v2354(VarCurr,B)<->$F)))).
% 298.48/296.72  all VarCurr (-v2357(VarCurr)<->v805(VarCurr)).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2346(VarNext)<->v2347(VarNext))).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2347(VarNext)<->v2348(VarNext)&v2343(VarNext))).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2348(VarNext)<->v2350(VarNext))).
% 298.48/296.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2350(VarNext)<->v2343(VarCurr))).
% 298.48/296.72  all VarCurr (v2343(VarCurr)<->v274(VarCurr)).
% 298.48/296.72  all VarCurr (v961(VarCurr,bitIndex63)<->v2339(VarCurr,bitIndex63)).
% 298.48/296.72  all VarCurr B (range_63_0(B)-> (v2339(VarCurr,B)<->v2340(VarCurr,B)|v2327(VarCurr,B))).
% 298.48/296.72  all VarCurr B (range_63_0(B)-> (v2340(VarCurr,B)<->v959(VarCurr,B)&v2341(VarCurr,B))).
% 298.48/296.72  all VarCurr B (range_63_0(B)-> (v2341(VarCurr,B)<-> -v963(VarCurr,B))).
% 298.48/296.72  all B (range_63_0(B)-> (v959(constB0,B)<->$F)).
% 298.48/296.72  all VarCurr (-v2338(VarCurr)-> (v2327(VarCurr,bitIndex63)<->$F)).
% 298.48/296.72  all VarCurr (v2338(VarCurr)-> (v2327(VarCurr,bitIndex63)<->v2329(VarCurr))).
% 298.48/296.72  all VarCurr (v2338(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.48/296.72  all VarCurr (v2329(VarCurr)<->v2331(VarCurr)|v2334(VarCurr)).
% 298.48/296.72  all VarCurr (v2334(VarCurr)<->v2303(VarCurr)&v2313(VarCurr,bitIndex0)).
% 298.48/296.72  all VarCurr (v2331(VarCurr)<->v2205(VarCurr)&v2227(VarCurr)).
% 298.48/296.72  all VarCurr (v963(VarCurr,bitIndex63)<->v2325(VarCurr,bitIndex63)).
% 298.48/296.72  all VarCurr B (range_63_0(B)-> (v2325(VarCurr,B)<->v965(VarCurr,B)|v1011(VarCurr,B))).
% 298.48/296.72  all VarCurr (-v2324(VarCurr)-> (v965(VarCurr,bitIndex63)<->$F)).
% 298.48/296.72  all VarCurr (v2324(VarCurr)-> (v965(VarCurr,bitIndex63)<->v2201(VarCurr))).
% 298.48/296.72  all VarCurr (v2324(VarCurr)<-> (v967(VarCurr,bitIndex5)<->$T)& (v967(VarCurr,bitIndex4)<->$T)& (v967(VarCurr,bitIndex3)<->$T)& (v967(VarCurr,bitIndex2)<->$T)& (v967(VarCurr,bitIndex1)<->$T)& (v967(VarCurr,bitIndex0)<->$T)).
% 298.48/296.72  all VarCurr (v2201(VarCurr)<->v2203(VarCurr)|v2301(VarCurr)).
% 298.48/296.72  all VarCurr (v2301(VarCurr)<->v2303(VarCurr)&v2321(VarCurr)).
% 298.48/296.72  all VarCurr (-v2321(VarCurr)<->v2313(VarCurr,bitIndex0)).
% 298.48/296.72  all VarCurr (v2313(VarCurr,bitIndex0)<->v2315(VarCurr,bitIndex0)).
% 298.48/296.72  all VarCurr (v2315(VarCurr,bitIndex0)<->v2317(VarCurr,bitIndex0)).
% 298.48/296.72  all VarCurr (v2317(VarCurr,bitIndex0)<->v2319(VarCurr,bitIndex0)).
% 298.48/296.72  all VarCurr (v2319(VarCurr,bitIndex0)<->v543(VarCurr,bitIndex0)).
% 298.48/296.72  all VarCurr (v2303(VarCurr)<->v2305(VarCurr)).
% 298.48/296.72  all VarCurr (v2305(VarCurr)<->v2307(VarCurr)).
% 298.48/296.72  all VarCurr (v2307(VarCurr)<->v2309(VarCurr)).
% 298.55/296.73  all VarCurr (v2309(VarCurr)<->v2311(VarCurr)).
% 298.55/296.73  v2311(constB0)<->$F.
% 298.55/296.73  all VarCurr (v2203(VarCurr)<->v2205(VarCurr)&v2299(VarCurr)).
% 298.55/296.73  all VarCurr (-v2299(VarCurr)<->v2227(VarCurr)).
% 298.55/296.73  all VarCurr (v2227(VarCurr)<->v2229(VarCurr)).
% 298.55/296.73  all VarCurr (v2229(VarCurr)<->v2231(VarCurr)).
% 298.55/296.73  all VarCurr (v2231(VarCurr)<->v2233(VarCurr)).
% 298.55/296.73  all VarCurr (v2233(VarCurr)<->v2235(VarCurr)).
% 298.55/296.73  all VarCurr (v2235(VarCurr)<->v2296(VarCurr)&v2297(VarCurr)).
% 298.55/296.73  all VarCurr (-v2297(VarCurr)<->v2242(VarCurr)).
% 298.55/296.73  all VarCurr (v2296(VarCurr)<->v2237(VarCurr)|v2239(VarCurr)).
% 298.55/296.73  all VarCurr (-v2291(VarCurr)-> (v2242(VarCurr)<->$F)).
% 298.55/296.73  all VarCurr (v2291(VarCurr)-> (v2242(VarCurr)<->v2294(VarCurr))).
% 298.55/296.73  all VarCurr (-v268(VarCurr)& -v991(VarCurr)& -v2223(VarCurr)-> (v2294(VarCurr)<->$T)).
% 298.55/296.73  all VarCurr (v2223(VarCurr)-> (v2294(VarCurr)<->v2244(VarCurr))).
% 298.55/296.73  all VarCurr (v991(VarCurr)-> (v2294(VarCurr)<->$T)).
% 298.55/296.73  all VarCurr (v268(VarCurr)-> (v2294(VarCurr)<->v2244(VarCurr))).
% 298.55/296.73  all VarCurr (v2291(VarCurr)<->v2292(VarCurr)|v2224(VarCurr)).
% 298.55/296.73  all VarCurr (v2292(VarCurr)<->v2293(VarCurr)|v2223(VarCurr)).
% 298.55/296.73  all VarCurr (v2293(VarCurr)<->v268(VarCurr)|v991(VarCurr)).
% 298.55/296.73  all VarCurr (v2244(VarCurr)<->v2237(VarCurr)&v2246(VarCurr)).
% 298.55/296.73  v2237(constB0)<->$F.
% 298.55/296.73  all VarCurr (v2246(VarCurr)<->v2248(VarCurr)).
% 298.55/296.73  all VarCurr (v2248(VarCurr)<->v2250(VarCurr)).
% 298.55/296.73  all VarCurr (v2250(VarCurr)<->v2252(VarCurr)).
% 298.55/296.73  all VarCurr (v2252(VarCurr)<->v2254(VarCurr)|v2264(VarCurr)).
% 298.55/296.73  all VarCurr (v2264(VarCurr)<->v2266(VarCurr)&v2287(VarCurr)).
% 298.55/296.73  all VarCurr (v2287(VarCurr)<-> (v2272(VarCurr,bitIndex32)<->v2279(VarCurr,bitIndex32))& (v2272(VarCurr,bitIndex31)<->v2279(VarCurr,bitIndex31))& (v2272(VarCurr,bitIndex30)<->v2279(VarCurr,bitIndex30))&
% 298.55/296.73  (v2272(VarCurr,bitIndex29)<->v2279(VarCurr,bitIndex29))& (v2272(VarCurr,bitIndex28)<->v2279(VarCurr,bitIndex28))& (v2272(VarCurr,bitIndex27)<->v2279(VarCurr,bitIndex27))&
% 298.55/296.73  (v2272(VarCurr,bitIndex26)<->v2279(VarCurr,bitIndex26))& (v2272(VarCurr,bitIndex25)<->v2279(VarCurr,bitIndex25))& (v2272(VarCurr,bitIndex24)<->v2279(VarCurr,bitIndex24))&
% 298.55/296.73  (v2272(VarCurr,bitIndex23)<->v2279(VarCurr,bitIndex23))& (v2272(VarCurr,bitIndex22)<->v2279(VarCurr,bitIndex22))& (v2272(VarCurr,bitIndex21)<->v2279(VarCurr,bitIndex21))&
% 298.55/296.73  (v2272(VarCurr,bitIndex20)<->v2279(VarCurr,bitIndex20))& (v2272(VarCurr,bitIndex19)<->v2279(VarCurr,bitIndex19))& (v2272(VarCurr,bitIndex18)<->v2279(VarCurr,bitIndex18))&
% 298.55/296.73  (v2272(VarCurr,bitIndex17)<->v2279(VarCurr,bitIndex17))& (v2272(VarCurr,bitIndex16)<->v2279(VarCurr,bitIndex16))& (v2272(VarCurr,bitIndex15)<->v2279(VarCurr,bitIndex15))&
% 298.55/296.73  (v2272(VarCurr,bitIndex14)<->v2279(VarCurr,bitIndex14))& (v2272(VarCurr,bitIndex13)<->v2279(VarCurr,bitIndex13))& (v2272(VarCurr,bitIndex12)<->v2279(VarCurr,bitIndex12))&
% 298.55/296.73  (v2272(VarCurr,bitIndex11)<->v2279(VarCurr,bitIndex11))& (v2272(VarCurr,bitIndex10)<->v2279(VarCurr,bitIndex10))& (v2272(VarCurr,bitIndex9)<->v2279(VarCurr,bitIndex9))&
% 298.55/296.73  (v2272(VarCurr,bitIndex8)<->v2279(VarCurr,bitIndex8))& (v2272(VarCurr,bitIndex7)<->v2279(VarCurr,bitIndex7))& (v2272(VarCurr,bitIndex6)<->v2279(VarCurr,bitIndex6))&
% 298.55/296.73  (v2272(VarCurr,bitIndex5)<->v2279(VarCurr,bitIndex5))& (v2272(VarCurr,bitIndex4)<->v2279(VarCurr,bitIndex4))& (v2272(VarCurr,bitIndex3)<->v2279(VarCurr,bitIndex3))&
% 298.55/296.73  (v2272(VarCurr,bitIndex2)<->v2279(VarCurr,bitIndex2))& (v2272(VarCurr,bitIndex1)<->v2279(VarCurr,bitIndex1))& (v2272(VarCurr,bitIndex0)<->v2279(VarCurr,bitIndex0))).
% 298.55/296.73  all VarCurr B (range_32_0(B)-> (v2279(VarCurr,B)<->v2281(VarCurr,B))).
% 298.55/296.73  all VarCurr B (range_32_0(B)-> (v2281(VarCurr,B)<->v2283(VarCurr,B))).
% 298.55/296.73  all VarCurr B (range_32_0(B)-> (v2283(VarCurr,B)<->v2285(VarCurr,B))).
% 298.55/296.73  all B (range_32_0(B)-> (v2285(constB0,B)<->$F)).
% 298.55/296.73  all VarCurr B (range_32_0(B)-> (v2272(VarCurr,B)<->v2274(VarCurr,B))).
% 298.55/296.73  all VarCurr B (range_32_0(B)-> (v2274(VarCurr,B)<->v2276(VarCurr,B))).
% 298.55/296.73  all B (range_32_0(B)-> (v2276(constB0,B)<->$F)).
% 298.55/296.73  all B (range_32_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex32).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex31).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex30).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex29).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex28).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex27).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex26).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex25).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex24).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex23).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex22).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex21).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex20).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex19).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex18).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex17).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex16).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex15).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex14).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex13).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex12).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex11).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex10).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex9).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex8).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex7).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex6).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex5).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex4).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex3).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex2).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex1).
% 298.55/296.74  -b000000000000000000000000000000000(bitIndex0).
% 298.55/296.74  all VarCurr (v2266(VarCurr)<->v2268(VarCurr)).
% 298.55/296.74  all VarCurr (v2268(VarCurr)<->v2270(VarCurr)).
% 298.55/296.74  v2270(constB0)<->$F.
% 298.55/296.74  all VarCurr (v2254(VarCurr)<->v2256(VarCurr)).
% 298.55/296.74  all VarCurr (v2256(VarCurr)<->v2258(VarCurr)).
% 298.55/296.74  all VarCurr (v2258(VarCurr)<->v2260(VarCurr)).
% 298.55/296.74  all VarCurr (v2260(VarCurr)<->v2262(VarCurr)).
% 298.55/296.74  v2262(constB0)<->$F.
% 298.55/296.74  all VarCurr (-v347(VarCurr)-> (v2239(VarCurr)<->$F)).
% 298.55/296.74  all VarCurr (v347(VarCurr)-> (v2239(VarCurr)<->$T)).
% 298.55/296.74  all VarCurr (v2205(VarCurr)<->v2207(VarCurr)).
% 298.55/296.74  all VarCurr (v2207(VarCurr)<->v2209(VarCurr)).
% 298.55/296.74  all VarCurr (v2209(VarCurr)<->v2211(VarCurr)).
% 298.55/296.74  all VarCurr (v2211(VarCurr)<->v2213(VarCurr)).
% 298.55/296.74  all VarCurr (-v2215(VarCurr)-> (v2213(VarCurr)<->$F)).
% 298.55/296.74  all VarCurr (v2215(VarCurr)-> (v2213(VarCurr)<->v2225(VarCurr))).
% 298.55/296.74  all VarCurr (v2225(VarCurr)<->v222(VarCurr,bitIndex0)&v222(VarCurr,bitIndex1)).
% 298.55/296.74  all VarCurr (v2215(VarCurr)<->v2216(VarCurr)|v2224(VarCurr)).
% 298.55/296.74  all VarCurr (v2224(VarCurr)<-> (v237(VarCurr,bitIndex2)<->$T)& (v237(VarCurr,bitIndex1)<->$T)& (v237(VarCurr,bitIndex0)<->$T)).
% 298.55/296.74  all VarCurr (v2216(VarCurr)<->v2217(VarCurr)|v2223(VarCurr)).
% 298.55/296.74  all VarCurr (v2223(VarCurr)<-> (v237(VarCurr,bitIndex2)<->$T)& (v237(VarCurr,bitIndex1)<->$T)& (v237(VarCurr,bitIndex0)<->$F)).
% 298.55/296.74  all VarCurr (v2217(VarCurr)<->v2218(VarCurr)|v2222(VarCurr)).
% 298.55/296.74  all VarCurr (v2222(VarCurr)<-> (v237(VarCurr,bitIndex2)<->$T)& (v237(VarCurr,bitIndex1)<->$F)& (v237(VarCurr,bitIndex0)<->$T)).
% 298.55/296.74  all VarCurr (v2218(VarCurr)<->v2219(VarCurr)|v2221(VarCurr)).
% 298.55/296.74  all VarCurr (v2221(VarCurr)<-> (v237(VarCurr,bitIndex2)<->$T)& (v237(VarCurr,bitIndex1)<->$F)& (v237(VarCurr,bitIndex0)<->$F)).
% 298.55/296.74  all VarCurr (v2219(VarCurr)<->v347(VarCurr)|v2220(VarCurr)).
% 298.55/296.74  all VarCurr (v2220(VarCurr)<-> (v237(VarCurr,bitIndex2)<->$F)& (v237(VarCurr,bitIndex1)<->$T)& (v237(VarCurr,bitIndex0)<->$T)).
% 298.55/296.74  all VarCurr (-v969(VarCurr)-> (all B (range_5_0(B)-> (v967(VarCurr,B)<->v1202(VarCurr,B))))).
% 298.55/296.74  all VarCurr (v969(VarCurr)-> (all B (range_5_0(B)-> (v967(VarCurr,B)<->v978(VarCurr,B))))).
% 298.55/296.74  all VarCurr B (range_5_0(B)-> (v1202(VarCurr,B)<->v1204(VarCurr,B))).
% 298.55/296.74  all VarCurr B (range_5_0(B)-> (v1204(VarCurr,B)<->v1206(VarCurr,B))).
% 298.55/296.74  all VarCurr B (range_5_0(B)-> (v1206(VarCurr,B)<->v1208(VarCurr,B))).
% 298.55/296.74  all VarCurr B (range_5_0(B)-> (v1208(VarCurr,B)<->v1210(VarCurr,B))).
% 298.55/296.74  all VarNext B (range_5_0(B)-> (v1210(VarNext,B)<->v2187(VarNext,B))).
% 298.55/296.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2188(VarNext)-> (all B (range_8_0(B)-> (v2187(VarNext,B)<->v1210(VarCurr,B)))))).
% 298.55/296.75  all VarNext (v2188(VarNext)-> (all B (range_8_0(B)-> (v2187(VarNext,B)<->v2196(VarNext,B))))).
% 298.55/296.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_8_0(B)-> (v2196(VarNext,B)<->v2194(VarCurr,B))))).
% 298.55/296.75  all VarCurr (-v728(VarCurr)-> (all B (range_8_0(B)-> (v2194(VarCurr,B)<->v1212(VarCurr,B))))).
% 298.55/296.75  all VarCurr (v728(VarCurr)-> (all B (range_8_0(B)-> (v2194(VarCurr,B)<->$F)))).
% 298.55/296.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2188(VarNext)<->v2189(VarNext))).
% 298.55/296.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2189(VarNext)<->v2191(VarNext)&v712(VarNext))).
% 298.55/296.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2191(VarNext)<->v721(VarNext))).
% 298.55/296.75  all B (range_8_0(B)-> (v1210(constB0,B)<->$F)).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1212(VarCurr,B)<->v1214(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1214(VarCurr,B)<->v1216(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1216(VarCurr,B)<->v1218(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1218(VarCurr,B)<->v1220(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1220(VarCurr,B)<->v1222(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1222(VarCurr,B)<->v1224(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1224(VarCurr,B)<->v1226(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1226(VarCurr,B)<->v1228(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1228(VarCurr,B)<->v1230(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1230(VarCurr,B)<->v1232(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1232(VarCurr,B)<->v1234(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1236(VarCurr,B)<->v1238(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1238(VarCurr,B)<->v1240(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1240(VarCurr,B)<->v1242(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1242(VarCurr,B)<->v1244(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1244(VarCurr,B)<->v1246(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1246(VarCurr,B)<->v1248(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1248(VarCurr,B)<->v1250(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1250(VarCurr,B)<->v1252(VarCurr,B))).
% 298.55/296.75  all VarNext B (range_5_0(B)-> (v1252(VarNext,B)<->v2179(VarNext,B))).
% 298.55/296.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2181(VarNext)-> (all B (range_26_0(B)-> (v2179(VarNext,B)<->v1252(VarCurr,B)))))).
% 298.55/296.75  all VarNext (v2181(VarNext)-> (all B (range_26_0(B)-> (v2179(VarNext,B)<->v1353(VarNext,B))))).
% 298.55/296.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2181(VarNext)<->v2182(VarNext)&v1347(VarNext))).
% 298.55/296.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2182(VarNext)<->v2184(VarNext)&v1332(VarNext))).
% 298.55/296.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2184(VarNext)<->v1341(VarNext))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1267(VarCurr,B)<->v1269(VarCurr,B))).
% 298.55/296.75  all VarCurr B (range_5_0(B)-> (v1269(VarCurr,B)<->v1271(VarCurr,B))).
% 298.55/296.75  all VarCurr (v1259(VarCurr,bitIndex2)<->v2162(VarCurr,bitIndex2)).
% 298.55/296.75  all VarCurr (-v2163(VarCurr)-> (all B (range_2_0(B)-> (v2162(VarCurr,B)<->$F)))).
% 298.55/296.75  all VarCurr (v2163(VarCurr)-> (all B (range_2_0(B)-> (v2162(VarCurr,B)<->v2176(VarCurr,B))))).
% 298.55/296.75  all VarCurr (-v2167(VarCurr)& -v2168(VarCurr)& -v2169(VarCurr)-> (all B (range_2_0(B)-> (v2176(VarCurr,B)<->b100(B))))).
% 298.55/296.75  all VarCurr (v2169(VarCurr)-> (all B (range_2_0(B)-> (v2176(VarCurr,B)<->b001(B))))).
% 298.55/296.75  all VarCurr (v2168(VarCurr)-> (all B (range_2_0(B)-> (v2176(VarCurr,B)<->b010(B))))).
% 298.55/296.75  all VarCurr (v2167(VarCurr)-> (all B (range_2_0(B)-> (v2176(VarCurr,B)<->b100(B))))).
% 298.55/296.75  all VarCurr (v2177(VarCurr)<->v2170(VarCurr)|v2173(VarCurr)).
% 298.55/296.75  all VarCurr (v2163(VarCurr)<->v2164(VarCurr)|v2173(VarCurr)).
% 298.55/296.75  all VarCurr (v2173(VarCurr)<->v2174(VarCurr)&v2175(VarCurr)).
% 298.55/296.75  all VarCurr (v2175(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$T)& (v1261(VarCurr,bitIndex1)<->$F)& (v1261(VarCurr,bitIndex0)<->$F)).
% 298.55/296.75  all VarCurr (v2174(VarCurr)<-> (v1263(VarCurr,bitIndex2)<->$F)& (v1263(VarCurr,bitIndex1)<->$F)& (v1263(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2164(VarCurr)<->v2165(VarCurr)|v2170(VarCurr)).
% 298.55/296.75  all VarCurr (v2170(VarCurr)<->v2171(VarCurr)&v2172(VarCurr)).
% 298.55/296.75  all VarCurr (v2172(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$F)& (v1261(VarCurr,bitIndex1)<->$T)& (v1261(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2171(VarCurr)<-> (v1263(VarCurr,bitIndex2)<->$F)& (v1263(VarCurr,bitIndex1)<->$F)& (v1263(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2165(VarCurr)<->v2166(VarCurr)|v2169(VarCurr)).
% 298.55/296.75  all VarCurr (v2169(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$F)& (v1261(VarCurr,bitIndex1)<->$T)& (v1261(VarCurr,bitIndex0)<->$F)).
% 298.55/296.75  all VarCurr (v2166(VarCurr)<->v2167(VarCurr)|v2168(VarCurr)).
% 298.55/296.75  all VarCurr (v2168(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$F)& (v1261(VarCurr,bitIndex1)<->$F)& (v1261(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2167(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$F)& (v1261(VarCurr,bitIndex1)<->$F)& (v1261(VarCurr,bitIndex0)<->$F)).
% 298.55/296.75  all VarCurr (-v2081(VarCurr)& -v2094(VarCurr)& -v2095(VarCurr)& -v2096(VarCurr)& -v2133(VarCurr)& -v2159(VarCurr)& -v2160(VarCurr)-> (all B (range_2_0(B)-> (v1263(VarCurr,B)<->$F)))).
% 298.55/296.75  all VarCurr (v2160(VarCurr)-> (all B (range_2_0(B)-> (v1263(VarCurr,B)<->$T)))).
% 298.55/296.75  all VarCurr (v2159(VarCurr)-> (all B (range_2_0(B)-> (v1263(VarCurr,B)<->b110(B))))).
% 298.55/296.75  all VarCurr (v2133(VarCurr)-> (all B (range_2_0(B)-> (v1263(VarCurr,B)<->v2134(VarCurr,B))))).
% 298.55/296.75  all VarCurr (v2096(VarCurr)-> (all B (range_2_0(B)-> (v1263(VarCurr,B)<->v2097(VarCurr,B))))).
% 298.55/296.75  all VarCurr (v2095(VarCurr)-> (all B (range_2_0(B)-> (v1263(VarCurr,B)<->b011(B))))).
% 298.55/296.75  all VarCurr (v2094(VarCurr)-> (all B (range_2_0(B)-> (v1263(VarCurr,B)<->b010(B))))).
% 298.55/296.75  all VarCurr (v2081(VarCurr)-> (all B (range_2_0(B)-> (v1263(VarCurr,B)<->v2082(VarCurr,B))))).
% 298.55/296.75  all VarCurr (v2161(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$T)& (v1261(VarCurr,bitIndex1)<->$T)& (v1261(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2160(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$T)& (v1261(VarCurr,bitIndex1)<->$T)& (v1261(VarCurr,bitIndex0)<->$F)).
% 298.55/296.75  all VarCurr (v2159(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$T)& (v1261(VarCurr,bitIndex1)<->$F)& (v1261(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (-v2135(VarCurr)& -v2136(VarCurr)& -v2139(VarCurr)-> (all B (range_2_0(B)-> (v2134(VarCurr,B)<->v2147(VarCurr,B))))).
% 298.55/296.75  all VarCurr (v2139(VarCurr)-> (all B (range_2_0(B)-> (v2134(VarCurr,B)<->b110(B))))).
% 298.55/296.75  all VarCurr (v2136(VarCurr)-> (all B (range_2_0(B)-> (v2134(VarCurr,B)<->b001(B))))).
% 298.55/296.75  all VarCurr (v2135(VarCurr)-> (all B (range_2_0(B)-> (v2134(VarCurr,B)<->$F)))).
% 298.55/296.75  all VarCurr (-v2148(VarCurr)-> (all B (range_2_0(B)-> (v2147(VarCurr,B)<->b101(B))))).
% 298.55/296.75  all VarCurr (v2148(VarCurr)-> (all B (range_2_0(B)-> (v2147(VarCurr,B)<->b100(B))))).
% 298.55/296.75  all VarCurr (v2150(VarCurr)<->v2152(VarCurr)|v2157(VarCurr)).
% 298.55/296.75  all VarCurr (v2157(VarCurr)<-> (v2158(VarCurr,bitIndex1)<->$T)& (v2158(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2158(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.75  all VarCurr (v2158(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.75  all VarCurr (v2152(VarCurr)<->v2153(VarCurr)|v2155(VarCurr)).
% 298.55/296.75  all VarCurr (v2155(VarCurr)<-> (v2156(VarCurr,bitIndex1)<->$T)& (v2156(VarCurr,bitIndex0)<->$F)).
% 298.55/296.75  all VarCurr (v2156(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.75  all VarCurr (v2156(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.75  all VarCurr (v2153(VarCurr)<-> (v2154(VarCurr,bitIndex1)<->$F)& (v2154(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2154(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.75  all VarCurr (v2154(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.75  all VarCurr (v2148(VarCurr)<-> (v2149(VarCurr,bitIndex1)<->$F)& (v2149(VarCurr,bitIndex0)<->$F)).
% 298.55/296.75  all VarCurr (v2149(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.75  all VarCurr (v2149(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.75  all VarCurr (v2139(VarCurr)<->v2141(VarCurr)|v2146(VarCurr)).
% 298.55/296.75  all VarCurr (v2146(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2141(VarCurr)<->v2142(VarCurr)|v2145(VarCurr)).
% 298.55/296.75  all VarCurr (v2145(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.75  all VarCurr (v2142(VarCurr)<->v2143(VarCurr)|v2144(VarCurr)).
% 298.55/296.75  all VarCurr (v2144(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$F)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.75  all VarCurr (v2143(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$F)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.76  all VarCurr (v2136(VarCurr)<->v2137(VarCurr)|v2138(VarCurr)).
% 298.55/296.76  all VarCurr (v2138(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$F)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.76  all VarCurr (v2137(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$F)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.76  all VarCurr (v2135(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$F)& (v1265(VarCurr,bitIndex1)<->$F)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.76  all VarCurr (v2133(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$T)& (v1261(VarCurr,bitIndex1)<->$F)& (v1261(VarCurr,bitIndex0)<->$F)).
% 298.55/296.76  all VarCurr (-v2098(VarCurr)& -v2099(VarCurr)& -v2102(VarCurr)-> (all B (range_2_0(B)-> (v2097(VarCurr,B)<->v2110(VarCurr,B))))).
% 298.55/296.76  all VarCurr (v2102(VarCurr)-> (all B (range_2_0(B)-> (v2097(VarCurr,B)<->b110(B))))).
% 298.55/296.76  all VarCurr (v2099(VarCurr)-> (all B (range_2_0(B)-> (v2097(VarCurr,B)<->b001(B))))).
% 298.55/296.76  all VarCurr (v2098(VarCurr)-> (all B (range_2_0(B)-> (v2097(VarCurr,B)<->$F)))).
% 298.55/296.76  all VarCurr (-v2111(VarCurr)& -v2113(VarCurr)& -v2122(VarCurr)-> (all B (range_2_0(B)-> (v2110(VarCurr,B)<->b101(B))))).
% 298.55/296.76  all VarCurr (v2122(VarCurr)-> (all B (range_2_0(B)-> (v2110(VarCurr,B)<->b100(B))))).
% 298.55/296.76  all VarCurr (v2113(VarCurr)-> (all B (range_2_0(B)-> (v2110(VarCurr,B)<->b101(B))))).
% 298.55/296.76  all VarCurr (v2111(VarCurr)-> (all B (range_2_0(B)-> (v2110(VarCurr,B)<->b011(B))))).
% 298.55/296.76  all VarCurr (v2124(VarCurr)<->v2126(VarCurr)|v2131(VarCurr)).
% 298.55/296.76  all VarCurr (v2131(VarCurr)<-> (v2132(VarCurr,bitIndex2)<->$T)& (v2132(VarCurr,bitIndex1)<->$T)& (v2132(VarCurr,bitIndex0)<->$T)).
% 298.55/296.76  all VarCurr (v2132(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.76  all VarCurr (v2132(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.76  all VarCurr (v2132(VarCurr,bitIndex2)<->v1274(VarCurr)).
% 298.55/296.76  all VarCurr (v2126(VarCurr)<->v2127(VarCurr)|v2129(VarCurr)).
% 298.55/296.76  all VarCurr (v2129(VarCurr)<-> (v2130(VarCurr,bitIndex2)<->$T)& (v2130(VarCurr,bitIndex1)<->$T)& (v2130(VarCurr,bitIndex0)<->$F)).
% 298.55/296.76  all VarCurr (v2130(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.76  all VarCurr (v2130(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.76  all VarCurr (v2130(VarCurr,bitIndex2)<->v1274(VarCurr)).
% 298.55/296.76  all VarCurr (v2127(VarCurr)<-> (v2128(VarCurr,bitIndex2)<->$T)& (v2128(VarCurr,bitIndex1)<->$F)& (v2128(VarCurr,bitIndex0)<->$T)).
% 298.55/296.76  all VarCurr (v2128(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.76  all VarCurr (v2128(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.76  all VarCurr (v2128(VarCurr,bitIndex2)<->v1274(VarCurr)).
% 298.55/296.76  all VarCurr (v2122(VarCurr)<-> (v2123(VarCurr,bitIndex2)<->$T)& (v2123(VarCurr,bitIndex1)<->$F)& (v2123(VarCurr,bitIndex0)<->$F)).
% 298.55/296.76  all VarCurr (v2123(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.76  all VarCurr (v2123(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.76  all VarCurr (v2123(VarCurr,bitIndex2)<->v1274(VarCurr)).
% 298.55/296.76  all VarCurr (v2113(VarCurr)<->v2115(VarCurr)|v2120(VarCurr)).
% 298.55/296.76  all VarCurr (v2120(VarCurr)<-> (v2121(VarCurr,bitIndex2)<->$F)& (v2121(VarCurr,bitIndex1)<->$T)& (v2121(VarCurr,bitIndex0)<->$T)).
% 298.55/296.76  all VarCurr (v2121(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.76  all VarCurr (v2121(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.76  all VarCurr (v2121(VarCurr,bitIndex2)<->v1274(VarCurr)).
% 298.55/296.76  all VarCurr (v2115(VarCurr)<->v2116(VarCurr)|v2118(VarCurr)).
% 298.55/296.76  all VarCurr (v2118(VarCurr)<-> (v2119(VarCurr,bitIndex2)<->$F)& (v2119(VarCurr,bitIndex1)<->$T)& (v2119(VarCurr,bitIndex0)<->$F)).
% 298.55/296.76  all VarCurr (v2119(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.76  all VarCurr (v2119(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.76  all VarCurr (v2119(VarCurr,bitIndex2)<->v1274(VarCurr)).
% 298.55/296.76  all VarCurr (v2116(VarCurr)<-> (v2117(VarCurr,bitIndex2)<->$F)& (v2117(VarCurr,bitIndex1)<->$F)& (v2117(VarCurr,bitIndex0)<->$T)).
% 298.55/296.76  all VarCurr (v2117(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.76  all VarCurr (v2117(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.76  all VarCurr (v2117(VarCurr,bitIndex2)<->v1274(VarCurr)).
% 298.55/296.76  all VarCurr (v2111(VarCurr)<-> (v2112(VarCurr,bitIndex2)<->$F)& (v2112(VarCurr,bitIndex1)<->$F)& (v2112(VarCurr,bitIndex0)<->$F)).
% 298.55/296.76  all VarCurr (v2112(VarCurr,bitIndex0)<->v1578(VarCurr)).
% 298.55/296.76  all VarCurr (v2112(VarCurr,bitIndex1)<->v1539(VarCurr)).
% 298.55/296.76  all VarCurr (v2112(VarCurr,bitIndex2)<->v1274(VarCurr)).
% 298.55/296.77  all VarCurr (v2102(VarCurr)<->v2104(VarCurr)|v2109(VarCurr)).
% 298.55/296.77  all VarCurr (v2109(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (v2104(VarCurr)<->v2105(VarCurr)|v2108(VarCurr)).
% 298.55/296.77  all VarCurr (v2108(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.77  all VarCurr (v2105(VarCurr)<->v2106(VarCurr)|v2107(VarCurr)).
% 298.55/296.77  all VarCurr (v2107(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$F)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (v2106(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$F)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.77  all VarCurr (v2099(VarCurr)<->v2100(VarCurr)|v2101(VarCurr)).
% 298.55/296.77  all VarCurr (v2101(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$F)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (v2100(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$F)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.77  all VarCurr (v2098(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$F)& (v1265(VarCurr,bitIndex1)<->$F)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (v2096(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$F)& (v1261(VarCurr,bitIndex1)<->$T)& (v1261(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (v2095(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$F)& (v1261(VarCurr,bitIndex1)<->$T)& (v1261(VarCurr,bitIndex0)<->$F)).
% 298.55/296.77  all VarCurr (v2094(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$F)& (v1261(VarCurr,bitIndex1)<->$F)& (v1261(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (-v2083(VarCurr)& -v2086(VarCurr)-> (all B (range_2_0(B)-> (v2082(VarCurr,B)<->$F)))).
% 298.55/296.77  all VarCurr (v2086(VarCurr)-> (all B (range_2_0(B)-> (v2082(VarCurr,B)<->b110(B))))).
% 298.55/296.77  all VarCurr (v2083(VarCurr)-> (all B (range_2_0(B)-> (v2082(VarCurr,B)<->b001(B))))).
% 298.55/296.77  all VarCurr (v2086(VarCurr)<->v2088(VarCurr)|v2093(VarCurr)).
% 298.55/296.77  all VarCurr (v2093(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (v2088(VarCurr)<->v2089(VarCurr)|v2092(VarCurr)).
% 298.55/296.77  all VarCurr (v2092(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.77  all VarCurr (v2089(VarCurr)<->v2090(VarCurr)|v2091(VarCurr)).
% 298.55/296.77  all VarCurr (v2091(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$F)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (v2090(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$T)& (v1265(VarCurr,bitIndex1)<->$F)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.77  all VarCurr (v2083(VarCurr)<->v2084(VarCurr)|v2085(VarCurr)).
% 298.55/296.77  all VarCurr (v2085(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$F)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$T)).
% 298.55/296.77  all VarCurr (v2084(VarCurr)<-> (v1265(VarCurr,bitIndex2)<->$F)& (v1265(VarCurr,bitIndex1)<->$T)& (v1265(VarCurr,bitIndex0)<->$F)).
% 298.55/296.77  all VarCurr (v2081(VarCurr)<-> (v1261(VarCurr,bitIndex2)<->$F)& (v1261(VarCurr,bitIndex1)<->$F)& (v1261(VarCurr,bitIndex0)<->$F)).
% 298.55/296.77  all B (range_2_0(B)-> (v1261(constB0,B)<->$F)).
% 298.55/296.77  all VarCurr (v1578(VarCurr)<->v1580(VarCurr)).
% 298.55/296.77  all VarCurr (v1580(VarCurr)<->v1582(VarCurr)).
% 298.55/296.77  all VarCurr (v1582(VarCurr)<->v1584(VarCurr)).
% 298.55/296.77  all VarCurr (v1584(VarCurr)<->v1586(VarCurr)).
% 298.55/296.77  all VarCurr (v1586(VarCurr)<->v1588(VarCurr)).
% 298.55/296.77  all VarCurr (v1588(VarCurr)<->v1590(VarCurr)).
% 298.55/296.77  all VarCurr (v1590(VarCurr)<->v2076(VarCurr)|v1989(VarCurr)).
% 298.55/296.77  all VarCurr (v2076(VarCurr)<->v2077(VarCurr)|v1902(VarCurr)).
% 298.55/296.77  all VarCurr (v2077(VarCurr)<->v2078(VarCurr)|v1815(VarCurr)).
% 298.55/296.77  all VarCurr (v2078(VarCurr)<->v2079(VarCurr)|v1728(VarCurr)).
% 298.55/296.77  all VarCurr (v2079(VarCurr)<->v1592(VarCurr)).
% 298.55/296.77  all VarCurr (-v2055(VarCurr)-> (v1989(VarCurr)<->v2057(VarCurr))).
% 298.55/296.77  all VarCurr (v2055(VarCurr)-> (v1989(VarCurr)<->$F)).
% 298.55/296.77  all VarCurr (-v2058(VarCurr)& -v2061(VarCurr)& -v2063(VarCurr)& -v2065(VarCurr)& -v2067(VarCurr)& -v2069(VarCurr)& -v2071(VarCurr)& -v2073(VarCurr)-> (v2057(VarCurr)<->$F)).
% 298.55/296.77  all VarCurr (v2073(VarCurr)-> (v2057(VarCurr)<->v2074(VarCurr))).
% 298.55/296.77  all VarCurr (v2071(VarCurr)-> (v2057(VarCurr)<->v2072(VarCurr))).
% 298.55/296.77  all VarCurr (v2069(VarCurr)-> (v2057(VarCurr)<->v2070(VarCurr))).
% 298.59/296.78  all VarCurr (v2067(VarCurr)-> (v2057(VarCurr)<->v2068(VarCurr))).
% 298.59/296.78  all VarCurr (v2065(VarCurr)-> (v2057(VarCurr)<->v2066(VarCurr))).
% 298.59/296.78  all VarCurr (v2063(VarCurr)-> (v2057(VarCurr)<->v2064(VarCurr))).
% 298.59/296.78  all VarCurr (v2061(VarCurr)-> (v2057(VarCurr)<->v2062(VarCurr))).
% 298.59/296.78  all VarCurr (v2058(VarCurr)-> (v2057(VarCurr)<->v2060(VarCurr))).
% 298.59/296.78  all VarCurr (-v2074(VarCurr)<->v2047(VarCurr)).
% 298.59/296.78  all VarCurr (v2073(VarCurr)<-> (v2059(VarCurr,bitIndex2)<->$T)& (v2059(VarCurr,bitIndex1)<->$T)& (v2059(VarCurr,bitIndex0)<->$T)).
% 298.59/296.78  all VarCurr (-v2072(VarCurr)<->v2039(VarCurr)).
% 298.59/296.78  all VarCurr (v2071(VarCurr)<-> (v2059(VarCurr,bitIndex2)<->$T)& (v2059(VarCurr,bitIndex1)<->$F)& (v2059(VarCurr,bitIndex0)<->$T)).
% 298.59/296.78  all VarCurr (-v2070(VarCurr)<->v2031(VarCurr)).
% 298.59/296.78  all VarCurr (v2069(VarCurr)<-> (v2059(VarCurr,bitIndex2)<->$F)& (v2059(VarCurr,bitIndex1)<->$T)& (v2059(VarCurr,bitIndex0)<->$T)).
% 298.59/296.78  all VarCurr (-v2068(VarCurr)<->v2023(VarCurr)).
% 298.59/296.78  all VarCurr (v2067(VarCurr)<-> (v2059(VarCurr,bitIndex2)<->$F)& (v2059(VarCurr,bitIndex1)<->$F)& (v2059(VarCurr,bitIndex0)<->$T)).
% 298.59/296.78  all VarCurr (-v2066(VarCurr)<->v2015(VarCurr)).
% 298.59/296.78  all VarCurr (v2065(VarCurr)<-> (v2059(VarCurr,bitIndex2)<->$T)& (v2059(VarCurr,bitIndex1)<->$T)& (v2059(VarCurr,bitIndex0)<->$F)).
% 298.59/296.78  all VarCurr (-v2064(VarCurr)<->v2007(VarCurr)).
% 298.59/296.78  all VarCurr (v2063(VarCurr)<-> (v2059(VarCurr,bitIndex2)<->$T)& (v2059(VarCurr,bitIndex1)<->$F)& (v2059(VarCurr,bitIndex0)<->$F)).
% 298.59/296.78  all VarCurr (-v2062(VarCurr)<->v1999(VarCurr)).
% 298.59/296.78  all VarCurr (v2061(VarCurr)<-> (v2059(VarCurr,bitIndex2)<->$F)& (v2059(VarCurr,bitIndex1)<->$T)& (v2059(VarCurr,bitIndex0)<->$F)).
% 298.59/296.78  all VarCurr (-v2060(VarCurr)<->v1991(VarCurr)).
% 298.59/296.78  all VarCurr (v2058(VarCurr)<-> (v2059(VarCurr,bitIndex2)<->$F)& (v2059(VarCurr,bitIndex1)<->$F)& (v2059(VarCurr,bitIndex0)<->$F)).
% 298.59/296.78  all VarCurr (v2059(VarCurr,bitIndex0)<->v1616(VarCurr)).
% 298.59/296.78  all VarCurr ((v2059(VarCurr,bitIndex2)<->v1600(VarCurr,bitIndex1))& (v2059(VarCurr,bitIndex1)<->v1600(VarCurr,bitIndex0))).
% 298.59/296.78  all VarCurr (v2055(VarCurr)<->v1594(VarCurr)|v2056(VarCurr)).
% 298.59/296.78  all VarCurr (-v2056(VarCurr)<->v1510(VarCurr)).
% 298.59/296.78  all VarCurr (v2047(VarCurr)<->v2049(VarCurr)).
% 298.59/296.78  all VarCurr (v2049(VarCurr)<->v2051(VarCurr)).
% 298.59/296.78  all VarCurr (v2051(VarCurr)<->v2053(VarCurr)).
% 298.59/296.78  all VarCurr (v2053(VarCurr)<->$T).
% 298.59/296.78  all VarCurr (v2039(VarCurr)<->v2041(VarCurr)).
% 298.59/296.78  all VarCurr (v2041(VarCurr)<->v2043(VarCurr)).
% 298.59/296.78  all VarCurr (v2043(VarCurr)<->v2045(VarCurr)).
% 298.59/296.78  all VarCurr (v2045(VarCurr)<->$T).
% 298.59/296.78  all VarCurr (v2031(VarCurr)<->v2033(VarCurr)).
% 298.59/296.78  all VarCurr (v2033(VarCurr)<->v2035(VarCurr)).
% 298.59/296.78  all VarCurr (v2035(VarCurr)<->v2037(VarCurr)).
% 298.59/296.78  all VarCurr (v2037(VarCurr)<->$T).
% 298.59/296.78  all VarCurr (v2023(VarCurr)<->v2025(VarCurr)).
% 298.59/296.78  all VarCurr (v2025(VarCurr)<->v2027(VarCurr)).
% 298.59/296.78  all VarCurr (v2027(VarCurr)<->v2029(VarCurr)).
% 298.59/296.78  all VarCurr (v2029(VarCurr)<->$T).
% 298.59/296.78  all VarCurr (v2015(VarCurr)<->v2017(VarCurr)).
% 298.59/296.78  all VarCurr (v2017(VarCurr)<->v2019(VarCurr)).
% 298.59/296.78  all VarCurr (v2019(VarCurr)<->v2021(VarCurr)).
% 298.59/296.78  all VarCurr (v2021(VarCurr)<->$T).
% 298.59/296.78  all VarCurr (v2007(VarCurr)<->v2009(VarCurr)).
% 298.59/296.78  all VarCurr (v2009(VarCurr)<->v2011(VarCurr)).
% 298.59/296.78  all VarCurr (v2011(VarCurr)<->v2013(VarCurr)).
% 298.59/296.78  all VarCurr (v2013(VarCurr)<->$T).
% 298.59/296.78  all VarCurr (v1999(VarCurr)<->v2001(VarCurr)).
% 298.59/296.78  all VarCurr (v2001(VarCurr)<->v2003(VarCurr)).
% 298.59/296.78  all VarCurr (v2003(VarCurr)<->v2005(VarCurr)).
% 298.59/296.78  all VarCurr (v2005(VarCurr)<->$T).
% 298.59/296.78  all VarCurr (v1991(VarCurr)<->v1993(VarCurr)).
% 298.59/296.78  all VarCurr (v1993(VarCurr)<->v1995(VarCurr)).
% 298.59/296.78  all VarCurr (v1995(VarCurr)<->v1997(VarCurr)).
% 298.59/296.78  all VarCurr (v1997(VarCurr)<->$T).
% 298.59/296.78  all VarCurr (-v1968(VarCurr)-> (v1902(VarCurr)<->v1970(VarCurr))).
% 298.59/296.78  all VarCurr (v1968(VarCurr)-> (v1902(VarCurr)<->$F)).
% 298.59/296.78  all VarCurr (-v1971(VarCurr)& -v1974(VarCurr)& -v1976(VarCurr)& -v1978(VarCurr)& -v1980(VarCurr)& -v1982(VarCurr)& -v1984(VarCurr)& -v1986(VarCurr)-> (v1970(VarCurr)<->$F)).
% 298.59/296.78  all VarCurr (v1986(VarCurr)-> (v1970(VarCurr)<->v1987(VarCurr))).
% 298.59/296.78  all VarCurr (v1984(VarCurr)-> (v1970(VarCurr)<->v1985(VarCurr))).
% 298.59/296.78  all VarCurr (v1982(VarCurr)-> (v1970(VarCurr)<->v1983(VarCurr))).
% 298.59/296.78  all VarCurr (v1980(VarCurr)-> (v1970(VarCurr)<->v1981(VarCurr))).
% 298.59/296.78  all VarCurr (v1978(VarCurr)-> (v1970(VarCurr)<->v1979(VarCurr))).
% 298.59/296.79  all VarCurr (v1976(VarCurr)-> (v1970(VarCurr)<->v1977(VarCurr))).
% 298.59/296.79  all VarCurr (v1974(VarCurr)-> (v1970(VarCurr)<->v1975(VarCurr))).
% 298.59/296.79  all VarCurr (v1971(VarCurr)-> (v1970(VarCurr)<->v1973(VarCurr))).
% 298.59/296.79  all VarCurr (-v1987(VarCurr)<->v1960(VarCurr)).
% 298.59/296.79  all VarCurr (v1986(VarCurr)<-> (v1972(VarCurr,bitIndex2)<->$T)& (v1972(VarCurr,bitIndex1)<->$T)& (v1972(VarCurr,bitIndex0)<->$T)).
% 298.59/296.79  all VarCurr (-v1985(VarCurr)<->v1952(VarCurr)).
% 298.59/296.79  all VarCurr (v1984(VarCurr)<-> (v1972(VarCurr,bitIndex2)<->$T)& (v1972(VarCurr,bitIndex1)<->$F)& (v1972(VarCurr,bitIndex0)<->$T)).
% 298.59/296.79  all VarCurr (-v1983(VarCurr)<->v1944(VarCurr)).
% 298.59/296.79  all VarCurr (v1982(VarCurr)<-> (v1972(VarCurr,bitIndex2)<->$F)& (v1972(VarCurr,bitIndex1)<->$T)& (v1972(VarCurr,bitIndex0)<->$T)).
% 298.59/296.79  all VarCurr (-v1981(VarCurr)<->v1936(VarCurr)).
% 298.59/296.79  all VarCurr (v1980(VarCurr)<-> (v1972(VarCurr,bitIndex2)<->$F)& (v1972(VarCurr,bitIndex1)<->$F)& (v1972(VarCurr,bitIndex0)<->$T)).
% 298.59/296.79  all VarCurr (-v1979(VarCurr)<->v1928(VarCurr)).
% 298.59/296.79  all VarCurr (v1978(VarCurr)<-> (v1972(VarCurr,bitIndex2)<->$T)& (v1972(VarCurr,bitIndex1)<->$T)& (v1972(VarCurr,bitIndex0)<->$F)).
% 298.59/296.79  all VarCurr (-v1977(VarCurr)<->v1920(VarCurr)).
% 298.59/296.79  all VarCurr (v1976(VarCurr)<-> (v1972(VarCurr,bitIndex2)<->$T)& (v1972(VarCurr,bitIndex1)<->$F)& (v1972(VarCurr,bitIndex0)<->$F)).
% 298.59/296.79  all VarCurr (-v1975(VarCurr)<->v1912(VarCurr)).
% 298.59/296.79  all VarCurr (v1974(VarCurr)<-> (v1972(VarCurr,bitIndex2)<->$F)& (v1972(VarCurr,bitIndex1)<->$T)& (v1972(VarCurr,bitIndex0)<->$F)).
% 298.59/296.79  all VarCurr (-v1973(VarCurr)<->v1904(VarCurr)).
% 298.59/296.79  all VarCurr (v1971(VarCurr)<-> (v1972(VarCurr,bitIndex2)<->$F)& (v1972(VarCurr,bitIndex1)<->$F)& (v1972(VarCurr,bitIndex0)<->$F)).
% 298.59/296.79  all VarCurr (v1972(VarCurr,bitIndex0)<->v1616(VarCurr)).
% 298.59/296.79  all VarCurr ((v1972(VarCurr,bitIndex2)<->v1600(VarCurr,bitIndex1))& (v1972(VarCurr,bitIndex1)<->v1600(VarCurr,bitIndex0))).
% 298.59/296.79  all VarCurr (v1968(VarCurr)<->v1594(VarCurr)|v1969(VarCurr)).
% 298.59/296.79  all VarCurr (-v1969(VarCurr)<->v1500(VarCurr)).
% 298.59/296.79  all VarCurr (v1960(VarCurr)<->v1962(VarCurr)).
% 298.59/296.79  all VarCurr (v1962(VarCurr)<->v1964(VarCurr)).
% 298.59/296.79  all VarCurr (v1964(VarCurr)<->v1966(VarCurr)).
% 298.59/296.79  all VarCurr (v1966(VarCurr)<->$T).
% 298.59/296.79  all VarCurr (v1952(VarCurr)<->v1954(VarCurr)).
% 298.59/296.79  all VarCurr (v1954(VarCurr)<->v1956(VarCurr)).
% 298.59/296.79  all VarCurr (v1956(VarCurr)<->v1958(VarCurr)).
% 298.59/296.79  all VarCurr (v1958(VarCurr)<->$T).
% 298.59/296.79  all VarCurr (v1944(VarCurr)<->v1946(VarCurr)).
% 298.59/296.79  all VarCurr (v1946(VarCurr)<->v1948(VarCurr)).
% 298.59/296.79  all VarCurr (v1948(VarCurr)<->v1950(VarCurr)).
% 298.59/296.79  all VarCurr (v1950(VarCurr)<->$T).
% 298.59/296.79  all VarCurr (v1936(VarCurr)<->v1938(VarCurr)).
% 298.59/296.79  all VarCurr (v1938(VarCurr)<->v1940(VarCurr)).
% 298.59/296.79  all VarCurr (v1940(VarCurr)<->v1942(VarCurr)).
% 298.59/296.79  all VarCurr (v1942(VarCurr)<->$T).
% 298.59/296.79  all VarCurr (v1928(VarCurr)<->v1930(VarCurr)).
% 298.59/296.79  all VarCurr (v1930(VarCurr)<->v1932(VarCurr)).
% 298.59/296.79  all VarCurr (v1932(VarCurr)<->v1934(VarCurr)).
% 298.59/296.79  all VarCurr (v1934(VarCurr)<->$T).
% 298.59/296.79  all VarCurr (v1920(VarCurr)<->v1922(VarCurr)).
% 298.59/296.79  all VarCurr (v1922(VarCurr)<->v1924(VarCurr)).
% 298.59/296.79  all VarCurr (v1924(VarCurr)<->v1926(VarCurr)).
% 298.59/296.79  all VarCurr (v1926(VarCurr)<->$T).
% 298.59/296.79  all VarCurr (v1912(VarCurr)<->v1914(VarCurr)).
% 298.59/296.79  all VarCurr (v1914(VarCurr)<->v1916(VarCurr)).
% 298.59/296.79  all VarCurr (v1916(VarCurr)<->v1918(VarCurr)).
% 298.59/296.79  all VarCurr (v1918(VarCurr)<->$T).
% 298.59/296.79  all VarCurr (v1904(VarCurr)<->v1906(VarCurr)).
% 298.59/296.79  all VarCurr (v1906(VarCurr)<->v1908(VarCurr)).
% 298.59/296.79  all VarCurr (v1908(VarCurr)<->v1910(VarCurr)).
% 298.59/296.79  all VarCurr (v1910(VarCurr)<->$T).
% 298.59/296.79  all VarCurr (-v1881(VarCurr)-> (v1815(VarCurr)<->v1883(VarCurr))).
% 298.59/296.79  all VarCurr (v1881(VarCurr)-> (v1815(VarCurr)<->$F)).
% 298.59/296.79  all VarCurr (-v1884(VarCurr)& -v1887(VarCurr)& -v1889(VarCurr)& -v1891(VarCurr)& -v1893(VarCurr)& -v1895(VarCurr)& -v1897(VarCurr)& -v1899(VarCurr)-> (v1883(VarCurr)<->$F)).
% 298.59/296.79  all VarCurr (v1899(VarCurr)-> (v1883(VarCurr)<->v1900(VarCurr))).
% 298.59/296.79  all VarCurr (v1897(VarCurr)-> (v1883(VarCurr)<->v1898(VarCurr))).
% 298.59/296.79  all VarCurr (v1895(VarCurr)-> (v1883(VarCurr)<->v1896(VarCurr))).
% 298.59/296.79  all VarCurr (v1893(VarCurr)-> (v1883(VarCurr)<->v1894(VarCurr))).
% 298.59/296.79  all VarCurr (v1891(VarCurr)-> (v1883(VarCurr)<->v1892(VarCurr))).
% 298.59/296.79  all VarCurr (v1889(VarCurr)-> (v1883(VarCurr)<->v1890(VarCurr))).
% 298.59/296.79  all VarCurr (v1887(VarCurr)-> (v1883(VarCurr)<->v1888(VarCurr))).
% 298.59/296.80  all VarCurr (v1884(VarCurr)-> (v1883(VarCurr)<->v1886(VarCurr))).
% 298.59/296.80  all VarCurr (-v1900(VarCurr)<->v1873(VarCurr)).
% 298.59/296.80  all VarCurr (v1899(VarCurr)<-> (v1885(VarCurr,bitIndex2)<->$T)& (v1885(VarCurr,bitIndex1)<->$T)& (v1885(VarCurr,bitIndex0)<->$T)).
% 298.59/296.80  all VarCurr (-v1898(VarCurr)<->v1865(VarCurr)).
% 298.59/296.80  all VarCurr (v1897(VarCurr)<-> (v1885(VarCurr,bitIndex2)<->$T)& (v1885(VarCurr,bitIndex1)<->$F)& (v1885(VarCurr,bitIndex0)<->$T)).
% 298.59/296.80  all VarCurr (-v1896(VarCurr)<->v1857(VarCurr)).
% 298.59/296.80  all VarCurr (v1895(VarCurr)<-> (v1885(VarCurr,bitIndex2)<->$F)& (v1885(VarCurr,bitIndex1)<->$T)& (v1885(VarCurr,bitIndex0)<->$T)).
% 298.59/296.80  all VarCurr (-v1894(VarCurr)<->v1849(VarCurr)).
% 298.59/296.80  all VarCurr (v1893(VarCurr)<-> (v1885(VarCurr,bitIndex2)<->$F)& (v1885(VarCurr,bitIndex1)<->$F)& (v1885(VarCurr,bitIndex0)<->$T)).
% 298.59/296.80  all VarCurr (-v1892(VarCurr)<->v1841(VarCurr)).
% 298.59/296.80  all VarCurr (v1891(VarCurr)<-> (v1885(VarCurr,bitIndex2)<->$T)& (v1885(VarCurr,bitIndex1)<->$T)& (v1885(VarCurr,bitIndex0)<->$F)).
% 298.59/296.80  all VarCurr (-v1890(VarCurr)<->v1833(VarCurr)).
% 298.59/296.80  all VarCurr (v1889(VarCurr)<-> (v1885(VarCurr,bitIndex2)<->$T)& (v1885(VarCurr,bitIndex1)<->$F)& (v1885(VarCurr,bitIndex0)<->$F)).
% 298.59/296.80  all VarCurr (-v1888(VarCurr)<->v1825(VarCurr)).
% 298.59/296.80  all VarCurr (v1887(VarCurr)<-> (v1885(VarCurr,bitIndex2)<->$F)& (v1885(VarCurr,bitIndex1)<->$T)& (v1885(VarCurr,bitIndex0)<->$F)).
% 298.59/296.80  all VarCurr (-v1886(VarCurr)<->v1817(VarCurr)).
% 298.59/296.80  all VarCurr (v1884(VarCurr)<-> (v1885(VarCurr,bitIndex2)<->$F)& (v1885(VarCurr,bitIndex1)<->$F)& (v1885(VarCurr,bitIndex0)<->$F)).
% 298.59/296.80  all VarCurr (v1885(VarCurr,bitIndex0)<->v1616(VarCurr)).
% 298.59/296.80  all VarCurr ((v1885(VarCurr,bitIndex2)<->v1600(VarCurr,bitIndex1))& (v1885(VarCurr,bitIndex1)<->v1600(VarCurr,bitIndex0))).
% 298.59/296.80  all VarCurr (v1881(VarCurr)<->v1594(VarCurr)|v1882(VarCurr)).
% 298.59/296.80  all VarCurr (-v1882(VarCurr)<->v1490(VarCurr)).
% 298.59/296.80  all VarCurr (v1873(VarCurr)<->v1875(VarCurr)).
% 298.59/296.80  all VarCurr (v1875(VarCurr)<->v1877(VarCurr)).
% 298.59/296.80  all VarCurr (v1877(VarCurr)<->v1879(VarCurr)).
% 298.59/296.80  all VarCurr (v1879(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1865(VarCurr)<->v1867(VarCurr)).
% 298.59/296.80  all VarCurr (v1867(VarCurr)<->v1869(VarCurr)).
% 298.59/296.80  all VarCurr (v1869(VarCurr)<->v1871(VarCurr)).
% 298.59/296.80  all VarCurr (v1871(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1857(VarCurr)<->v1859(VarCurr)).
% 298.59/296.80  all VarCurr (v1859(VarCurr)<->v1861(VarCurr)).
% 298.59/296.80  all VarCurr (v1861(VarCurr)<->v1863(VarCurr)).
% 298.59/296.80  all VarCurr (v1863(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1849(VarCurr)<->v1851(VarCurr)).
% 298.59/296.80  all VarCurr (v1851(VarCurr)<->v1853(VarCurr)).
% 298.59/296.80  all VarCurr (v1853(VarCurr)<->v1855(VarCurr)).
% 298.59/296.80  all VarCurr (v1855(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1841(VarCurr)<->v1843(VarCurr)).
% 298.59/296.80  all VarCurr (v1843(VarCurr)<->v1845(VarCurr)).
% 298.59/296.80  all VarCurr (v1845(VarCurr)<->v1847(VarCurr)).
% 298.59/296.80  all VarCurr (v1847(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1833(VarCurr)<->v1835(VarCurr)).
% 298.59/296.80  all VarCurr (v1835(VarCurr)<->v1837(VarCurr)).
% 298.59/296.80  all VarCurr (v1837(VarCurr)<->v1839(VarCurr)).
% 298.59/296.80  all VarCurr (v1839(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1825(VarCurr)<->v1827(VarCurr)).
% 298.59/296.80  all VarCurr (v1827(VarCurr)<->v1829(VarCurr)).
% 298.59/296.80  all VarCurr (v1829(VarCurr)<->v1831(VarCurr)).
% 298.59/296.80  all VarCurr (v1831(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1817(VarCurr)<->v1819(VarCurr)).
% 298.59/296.80  all VarCurr (v1819(VarCurr)<->v1821(VarCurr)).
% 298.59/296.80  all VarCurr (v1821(VarCurr)<->v1823(VarCurr)).
% 298.59/296.80  all VarCurr (v1823(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (-v1794(VarCurr)-> (v1728(VarCurr)<->v1796(VarCurr))).
% 298.59/296.80  all VarCurr (v1794(VarCurr)-> (v1728(VarCurr)<->$F)).
% 298.59/296.80  all VarCurr (-v1797(VarCurr)& -v1800(VarCurr)& -v1802(VarCurr)& -v1804(VarCurr)& -v1806(VarCurr)& -v1808(VarCurr)& -v1810(VarCurr)& -v1812(VarCurr)-> (v1796(VarCurr)<->$F)).
% 298.59/296.80  all VarCurr (v1812(VarCurr)-> (v1796(VarCurr)<->v1813(VarCurr))).
% 298.59/296.80  all VarCurr (v1810(VarCurr)-> (v1796(VarCurr)<->v1811(VarCurr))).
% 298.59/296.80  all VarCurr (v1808(VarCurr)-> (v1796(VarCurr)<->v1809(VarCurr))).
% 298.59/296.80  all VarCurr (v1806(VarCurr)-> (v1796(VarCurr)<->v1807(VarCurr))).
% 298.59/296.80  all VarCurr (v1804(VarCurr)-> (v1796(VarCurr)<->v1805(VarCurr))).
% 298.59/296.80  all VarCurr (v1802(VarCurr)-> (v1796(VarCurr)<->v1803(VarCurr))).
% 298.59/296.80  all VarCurr (v1800(VarCurr)-> (v1796(VarCurr)<->v1801(VarCurr))).
% 298.59/296.80  all VarCurr (v1797(VarCurr)-> (v1796(VarCurr)<->v1799(VarCurr))).
% 298.59/296.80  all VarCurr (-v1813(VarCurr)<->v1786(VarCurr)).
% 298.59/296.80  all VarCurr (v1812(VarCurr)<-> (v1798(VarCurr,bitIndex2)<->$T)& (v1798(VarCurr,bitIndex1)<->$T)& (v1798(VarCurr,bitIndex0)<->$T)).
% 298.59/296.80  all VarCurr (-v1811(VarCurr)<->v1778(VarCurr)).
% 298.59/296.80  all VarCurr (v1810(VarCurr)<-> (v1798(VarCurr,bitIndex2)<->$T)& (v1798(VarCurr,bitIndex1)<->$F)& (v1798(VarCurr,bitIndex0)<->$T)).
% 298.59/296.80  all VarCurr (-v1809(VarCurr)<->v1770(VarCurr)).
% 298.59/296.80  all VarCurr (v1808(VarCurr)<-> (v1798(VarCurr,bitIndex2)<->$F)& (v1798(VarCurr,bitIndex1)<->$T)& (v1798(VarCurr,bitIndex0)<->$T)).
% 298.59/296.80  all VarCurr (-v1807(VarCurr)<->v1762(VarCurr)).
% 298.59/296.80  all VarCurr (v1806(VarCurr)<-> (v1798(VarCurr,bitIndex2)<->$F)& (v1798(VarCurr,bitIndex1)<->$F)& (v1798(VarCurr,bitIndex0)<->$T)).
% 298.59/296.80  all VarCurr (-v1805(VarCurr)<->v1754(VarCurr)).
% 298.59/296.80  all VarCurr (v1804(VarCurr)<-> (v1798(VarCurr,bitIndex2)<->$T)& (v1798(VarCurr,bitIndex1)<->$T)& (v1798(VarCurr,bitIndex0)<->$F)).
% 298.59/296.80  all VarCurr (-v1803(VarCurr)<->v1746(VarCurr)).
% 298.59/296.80  all VarCurr (v1802(VarCurr)<-> (v1798(VarCurr,bitIndex2)<->$T)& (v1798(VarCurr,bitIndex1)<->$F)& (v1798(VarCurr,bitIndex0)<->$F)).
% 298.59/296.80  all VarCurr (-v1801(VarCurr)<->v1738(VarCurr)).
% 298.59/296.80  all VarCurr (v1800(VarCurr)<-> (v1798(VarCurr,bitIndex2)<->$F)& (v1798(VarCurr,bitIndex1)<->$T)& (v1798(VarCurr,bitIndex0)<->$F)).
% 298.59/296.80  all VarCurr (-v1799(VarCurr)<->v1730(VarCurr)).
% 298.59/296.80  all VarCurr (v1797(VarCurr)<-> (v1798(VarCurr,bitIndex2)<->$F)& (v1798(VarCurr,bitIndex1)<->$F)& (v1798(VarCurr,bitIndex0)<->$F)).
% 298.59/296.80  all VarCurr (v1798(VarCurr,bitIndex0)<->v1616(VarCurr)).
% 298.59/296.80  all VarCurr ((v1798(VarCurr,bitIndex2)<->v1600(VarCurr,bitIndex1))& (v1798(VarCurr,bitIndex1)<->v1600(VarCurr,bitIndex0))).
% 298.59/296.80  all VarCurr (v1794(VarCurr)<->v1594(VarCurr)|v1795(VarCurr)).
% 298.59/296.80  all VarCurr (-v1795(VarCurr)<->v1480(VarCurr)).
% 298.59/296.80  all VarCurr (v1786(VarCurr)<->v1788(VarCurr)).
% 298.59/296.80  all VarCurr (v1788(VarCurr)<->v1790(VarCurr)).
% 298.59/296.80  all VarCurr (v1790(VarCurr)<->v1792(VarCurr)).
% 298.59/296.80  all VarCurr (v1792(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1778(VarCurr)<->v1780(VarCurr)).
% 298.59/296.80  all VarCurr (v1780(VarCurr)<->v1782(VarCurr)).
% 298.59/296.80  all VarCurr (v1782(VarCurr)<->v1784(VarCurr)).
% 298.59/296.80  all VarCurr (v1784(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1770(VarCurr)<->v1772(VarCurr)).
% 298.59/296.80  all VarCurr (v1772(VarCurr)<->v1774(VarCurr)).
% 298.59/296.80  all VarCurr (v1774(VarCurr)<->v1776(VarCurr)).
% 298.59/296.80  all VarCurr (v1776(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1762(VarCurr)<->v1764(VarCurr)).
% 298.59/296.80  all VarCurr (v1764(VarCurr)<->v1766(VarCurr)).
% 298.59/296.80  all VarCurr (v1766(VarCurr)<->v1768(VarCurr)).
% 298.59/296.80  all VarCurr (v1768(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1754(VarCurr)<->v1756(VarCurr)).
% 298.59/296.80  all VarCurr (v1756(VarCurr)<->v1758(VarCurr)).
% 298.59/296.80  all VarCurr (v1758(VarCurr)<->v1760(VarCurr)).
% 298.59/296.80  all VarCurr (v1760(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1746(VarCurr)<->v1748(VarCurr)).
% 298.59/296.80  all VarCurr (v1748(VarCurr)<->v1750(VarCurr)).
% 298.59/296.80  all VarCurr (v1750(VarCurr)<->v1752(VarCurr)).
% 298.59/296.80  all VarCurr (v1752(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1738(VarCurr)<->v1740(VarCurr)).
% 298.59/296.80  all VarCurr (v1740(VarCurr)<->v1742(VarCurr)).
% 298.59/296.80  all VarCurr (v1742(VarCurr)<->v1744(VarCurr)).
% 298.59/296.80  all VarCurr (v1744(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (v1730(VarCurr)<->v1732(VarCurr)).
% 298.59/296.80  all VarCurr (v1732(VarCurr)<->v1734(VarCurr)).
% 298.59/296.80  all VarCurr (v1734(VarCurr)<->v1736(VarCurr)).
% 298.59/296.80  all VarCurr (v1736(VarCurr)<->v1646(VarCurr)).
% 298.59/296.80  all VarCurr (-v1707(VarCurr)-> (v1592(VarCurr)<->v1709(VarCurr))).
% 298.59/296.80  all VarCurr (v1707(VarCurr)-> (v1592(VarCurr)<->$F)).
% 298.59/296.80  all VarCurr (-v1710(VarCurr)& -v1713(VarCurr)& -v1715(VarCurr)& -v1717(VarCurr)& -v1719(VarCurr)& -v1721(VarCurr)& -v1723(VarCurr)& -v1725(VarCurr)-> (v1709(VarCurr)<->$F)).
% 298.59/296.80  all VarCurr (v1725(VarCurr)-> (v1709(VarCurr)<->v1726(VarCurr))).
% 298.59/296.80  all VarCurr (v1723(VarCurr)-> (v1709(VarCurr)<->v1724(VarCurr))).
% 298.59/296.80  all VarCurr (v1721(VarCurr)-> (v1709(VarCurr)<->v1722(VarCurr))).
% 298.59/296.80  all VarCurr (v1719(VarCurr)-> (v1709(VarCurr)<->v1720(VarCurr))).
% 298.59/296.80  all VarCurr (v1717(VarCurr)-> (v1709(VarCurr)<->v1718(VarCurr))).
% 298.59/296.80  all VarCurr (v1715(VarCurr)-> (v1709(VarCurr)<->v1716(VarCurr))).
% 298.59/296.80  all VarCurr (v1713(VarCurr)-> (v1709(VarCurr)<->v1714(VarCurr))).
% 298.59/296.80  all VarCurr (v1710(VarCurr)-> (v1709(VarCurr)<->v1712(VarCurr))).
% 298.59/296.81  all VarCurr (-v1726(VarCurr)<->v1699(VarCurr)).
% 298.59/296.81  all VarCurr (v1725(VarCurr)<-> (v1711(VarCurr,bitIndex2)<->$T)& (v1711(VarCurr,bitIndex1)<->$T)& (v1711(VarCurr,bitIndex0)<->$T)).
% 298.59/296.81  all VarCurr (-v1724(VarCurr)<->v1691(VarCurr)).
% 298.59/296.81  all VarCurr (v1723(VarCurr)<-> (v1711(VarCurr,bitIndex2)<->$T)& (v1711(VarCurr,bitIndex1)<->$F)& (v1711(VarCurr,bitIndex0)<->$T)).
% 298.59/296.81  all VarCurr (-v1722(VarCurr)<->v1683(VarCurr)).
% 298.59/296.81  all VarCurr (v1721(VarCurr)<-> (v1711(VarCurr,bitIndex2)<->$F)& (v1711(VarCurr,bitIndex1)<->$T)& (v1711(VarCurr,bitIndex0)<->$T)).
% 298.59/296.81  all VarCurr (-v1720(VarCurr)<->v1675(VarCurr)).
% 298.59/296.81  all VarCurr (v1719(VarCurr)<-> (v1711(VarCurr,bitIndex2)<->$F)& (v1711(VarCurr,bitIndex1)<->$F)& (v1711(VarCurr,bitIndex0)<->$T)).
% 298.59/296.81  all VarCurr (-v1718(VarCurr)<->v1667(VarCurr)).
% 298.59/296.81  all VarCurr (v1717(VarCurr)<-> (v1711(VarCurr,bitIndex2)<->$T)& (v1711(VarCurr,bitIndex1)<->$T)& (v1711(VarCurr,bitIndex0)<->$F)).
% 298.59/296.81  all VarCurr (-v1716(VarCurr)<->v1659(VarCurr)).
% 298.59/296.81  all VarCurr (v1715(VarCurr)<-> (v1711(VarCurr,bitIndex2)<->$T)& (v1711(VarCurr,bitIndex1)<->$F)& (v1711(VarCurr,bitIndex0)<->$F)).
% 298.59/296.81  all VarCurr (-v1714(VarCurr)<->v1651(VarCurr)).
% 298.59/296.81  all VarCurr (v1713(VarCurr)<-> (v1711(VarCurr,bitIndex2)<->$F)& (v1711(VarCurr,bitIndex1)<->$T)& (v1711(VarCurr,bitIndex0)<->$F)).
% 298.59/296.81  all VarCurr (-v1712(VarCurr)<->v1638(VarCurr)).
% 298.59/296.81  all VarCurr (v1710(VarCurr)<-> (v1711(VarCurr,bitIndex2)<->$F)& (v1711(VarCurr,bitIndex1)<->$F)& (v1711(VarCurr,bitIndex0)<->$F)).
% 298.59/296.81  all VarCurr (v1711(VarCurr,bitIndex0)<->v1616(VarCurr)).
% 298.59/296.81  all VarCurr ((v1711(VarCurr,bitIndex2)<->v1600(VarCurr,bitIndex1))& (v1711(VarCurr,bitIndex1)<->v1600(VarCurr,bitIndex0))).
% 298.59/296.81  all VarCurr (v1707(VarCurr)<->v1594(VarCurr)|v1708(VarCurr)).
% 298.59/296.81  all VarCurr (-v1708(VarCurr)<->v1470(VarCurr)).
% 298.59/296.81  all VarCurr (v1699(VarCurr)<->v1701(VarCurr)).
% 298.59/296.81  all VarCurr (v1701(VarCurr)<->v1703(VarCurr)).
% 298.59/296.81  all VarCurr (v1703(VarCurr)<->v1705(VarCurr)).
% 298.59/296.81  all VarCurr (v1705(VarCurr)<->v1646(VarCurr)).
% 298.59/296.81  all VarCurr (v1691(VarCurr)<->v1693(VarCurr)).
% 298.59/296.81  all VarCurr (v1693(VarCurr)<->v1695(VarCurr)).
% 298.59/296.81  all VarCurr (v1695(VarCurr)<->v1697(VarCurr)).
% 298.59/296.81  all VarCurr (v1697(VarCurr)<->v1646(VarCurr)).
% 298.59/296.81  all VarCurr (v1683(VarCurr)<->v1685(VarCurr)).
% 298.59/296.81  all VarCurr (v1685(VarCurr)<->v1687(VarCurr)).
% 298.59/296.81  all VarCurr (v1687(VarCurr)<->v1689(VarCurr)).
% 298.59/296.81  all VarCurr (v1689(VarCurr)<->v1646(VarCurr)).
% 298.59/296.81  all VarCurr (v1675(VarCurr)<->v1677(VarCurr)).
% 298.59/296.81  all VarCurr (v1677(VarCurr)<->v1679(VarCurr)).
% 298.59/296.81  all VarCurr (v1679(VarCurr)<->v1681(VarCurr)).
% 298.59/296.81  all VarCurr (v1681(VarCurr)<->v1646(VarCurr)).
% 298.59/296.81  all VarCurr (v1667(VarCurr)<->v1669(VarCurr)).
% 298.59/296.81  all VarCurr (v1669(VarCurr)<->v1671(VarCurr)).
% 298.59/296.81  all VarCurr (v1671(VarCurr)<->v1673(VarCurr)).
% 298.59/296.81  all VarCurr (v1673(VarCurr)<->v1646(VarCurr)).
% 298.59/296.81  all VarCurr (v1659(VarCurr)<->v1661(VarCurr)).
% 298.59/296.81  all VarCurr (v1661(VarCurr)<->v1663(VarCurr)).
% 298.59/296.81  all VarCurr (v1663(VarCurr)<->v1665(VarCurr)).
% 298.59/296.81  all VarCurr (v1665(VarCurr)<->v1646(VarCurr)).
% 298.59/296.81  all VarCurr (v1651(VarCurr)<->v1653(VarCurr)).
% 298.59/296.81  all VarCurr (v1653(VarCurr)<->v1655(VarCurr)).
% 298.59/296.81  all VarCurr (v1655(VarCurr)<->v1657(VarCurr)).
% 298.59/296.81  all VarCurr (v1657(VarCurr)<->v1646(VarCurr)).
% 298.59/296.81  all VarCurr (v1638(VarCurr)<->v1640(VarCurr)).
% 298.59/296.81  all VarCurr (v1640(VarCurr)<->v1642(VarCurr)).
% 298.59/296.81  all VarCurr (v1642(VarCurr)<->v1644(VarCurr)).
% 298.59/296.81  all VarCurr (v1644(VarCurr)<->v1646(VarCurr)).
% 298.59/296.81  all VarCurr (-v1646(VarCurr)<->v1649(VarCurr)).
% 298.59/296.81  all VarCurr (v1649(VarCurr)<->v250(VarCurr,bitIndex0)|v250(VarCurr,bitIndex1)).
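%----Note: the v1638..v1705 chains above are pure buffers that all bottom out
%----at v1646, and the last two axioms make v1646 a NOR over bits 0 and 1 of
%----v250. A one-line sketch under assumed names:
%      def v1646(v250_bit0, v250_bit1):
%          # v1649 is the OR of the two bits; v1646 is its negation (a NOR)
%          return not (v250_bit0 or v250_bit1)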
% 298.59/296.81  all VarCurr (v1616(VarCurr)<->v1618(VarCurr)).
% 298.59/296.81  all VarCurr (v1618(VarCurr)<->v1620(VarCurr)).
% 298.59/296.81  all VarCurr (v1620(VarCurr)<->v1622(VarCurr)).
% 298.59/296.81  all VarCurr (v1622(VarCurr)<->v1624(VarCurr)).
% 298.59/296.81  all VarCurr (v1624(VarCurr)<->v1626(VarCurr)).
% 298.59/296.81  all VarCurr (v1626(VarCurr)<->v1628(VarCurr)).
% 298.59/296.81  all VarCurr (v1628(VarCurr)<->v1630(VarCurr)).
% 298.59/296.81  all VarCurr (v1630(VarCurr)<->v1632(VarCurr)).
% 298.59/296.81  all VarCurr (v1632(VarCurr)<->v1634(VarCurr)).
% 298.59/296.81  all VarCurr (v1634(VarCurr)<->v1636(VarCurr)).
% 298.59/296.81  v1636(constB0)<->$F.
% 298.59/296.81  all VarCurr B (range_1_0(B)-> (v1600(VarCurr,B)<->v1602(VarCurr,B))).
% 298.59/296.81  all VarCurr B (range_1_0(B)-> (v1602(VarCurr,B)<->v1604(VarCurr,B))).
% 298.59/296.81  all VarCurr B (range_1_0(B)-> (v1604(VarCurr,B)<->v1606(VarCurr,B))).
% 298.59/296.81  all VarCurr B (range_1_0(B)-> (v1606(VarCurr,B)<->v1608(VarCurr,B))).
% 298.59/296.82  all VarCurr B (range_1_0(B)-> (v1608(VarCurr,B)<->v1610(VarCurr,B))).
% 298.59/296.82  all VarCurr B (range_1_0(B)-> (v1610(VarCurr,B)<->v1612(VarCurr,B))).
% 298.59/296.82  all VarCurr B (range_1_0(B)-> (v1612(VarCurr,B)<->v1614(VarCurr,B))).
% 298.59/296.82  all B (range_1_0(B)-> (v1614(constB0,B)<->$F)).
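%----Note: v1616 reaches v1636 through ten buffer stages, and the 2-bit bus
%----v1600 reaches v1614 through seven; the constB0 axioms pin both chain ends
%----to $F, so in the initial state every stage evaluates to false. Sketch of
%----the collapsed initial values (variable names are ours):
%      v1636_at_B0 = False                # axiom: v1636(constB0) <-> $F
%      v1616_at_B0 = v1636_at_B0          # the ten biconditionals are identities
%      v1614_at_B0 = [False, False]       # v1614(constB0,B) <-> $F for B in range_1_0
%      v1600_at_B0 = v1614_at_B0          # likewise for the bus chain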
% 298.59/296.82  all VarCurr (v1594(VarCurr)<->v1596(VarCurr)).
% 298.59/296.82  all VarCurr (v1596(VarCurr)<->v1598(VarCurr)).
% 298.59/296.82  all VarCurr (v1598(VarCurr)<->$F).
% 298.59/296.82  all VarCurr (v1539(VarCurr)<->v1541(VarCurr)).
% 298.59/296.82  all VarCurr (v1541(VarCurr)<->v1543(VarCurr)).
% 298.59/296.82  all VarCurr (v1543(VarCurr)<->v1545(VarCurr)).
% 298.59/296.82  all VarCurr (v1545(VarCurr)<->v1547(VarCurr)).
% 298.59/296.82  all VarCurr (v1547(VarCurr)<->v1549(VarCurr)).
% 298.59/296.82  all VarCurr (v1549(VarCurr)<->v1551(VarCurr)).
% 298.59/296.82  all VarCurr (v1551(VarCurr)<->v1553(VarCurr)).
% 298.59/296.82  all VarCurr (v1553(VarCurr)<->v1555(VarCurr)).
% 298.59/296.82  all VarCurr (v1555(VarCurr)<->v1557(VarCurr)).
% 298.59/296.82  all VarCurr (v1557(VarCurr)<->v1298(VarCurr)&v1575(VarCurr)).
% 298.59/296.82  all VarCurr (v1575(VarCurr)<->v1576(VarCurr)|v1563(VarCurr)).
% 298.59/296.82  all VarCurr (v1576(VarCurr)<->v1559(VarCurr)|v1561(VarCurr)).
% 298.59/296.82  v1561(constB0)<->$F.
% 298.59/296.82  v1559(constB0)<->$F.
% 298.59/296.82  all VarCurr (v1563(VarCurr)<->v1565(VarCurr)).
% 298.59/296.82  all VarCurr (v1565(VarCurr)<->v1567(VarCurr)).
% 298.59/296.82  all VarCurr (v1567(VarCurr)<->v1569(VarCurr)).
% 298.59/296.82  all VarCurr (v1569(VarCurr)<->v1571(VarCurr)).
% 298.59/296.82  all VarCurr (v1571(VarCurr)<->v1573(VarCurr)).
% 298.59/296.82  v1573(constB0)<->$F.
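%----Note: v1557 gates v1298 by a 3-way disjunction of flags (v1559, v1561,
%----v1563) whose sources are all pinned to $F at constB0, so v1557 is false
%----in the initial state regardless of v1298. Sketch (assumed names):
%      def v1557(v1298, v1559, v1561, v1563):
%          return v1298 and (v1559 or v1561 or v1563)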
% 298.59/296.82  all VarCurr (v1274(VarCurr)<->v1276(VarCurr)).
% 298.59/296.82  all VarCurr (v1276(VarCurr)<->v1278(VarCurr)).
% 298.59/296.82  all VarCurr (v1278(VarCurr)<->v1280(VarCurr)).
% 298.59/296.82  all VarCurr (v1280(VarCurr)<->v1282(VarCurr)).
% 298.59/296.82  all VarCurr (v1282(VarCurr)<->v1284(VarCurr)).
% 298.59/296.82  all VarCurr (v1284(VarCurr)<->v1286(VarCurr)).
% 298.59/296.82  all VarCurr (v1286(VarCurr)<->v1288(VarCurr)).
% 298.59/296.82  all VarCurr (v1288(VarCurr)<->v1290(VarCurr)).
% 298.59/296.82  all VarCurr (v1290(VarCurr)<->v1292(VarCurr)).
% 298.59/296.82  all VarCurr (v1292(VarCurr)<->v1294(VarCurr)&v1520(VarCurr)).
% 298.59/296.82  all VarCurr (v1520(VarCurr)<->v1521(VarCurr)|v1510(VarCurr)).
% 298.59/296.82  all VarCurr (v1521(VarCurr)<->v1522(VarCurr)|v1500(VarCurr)).
% 298.59/296.82  all VarCurr (v1522(VarCurr)<->v1523(VarCurr)|v1490(VarCurr)).
% 298.59/296.82  all VarCurr (v1523(VarCurr)<->v1524(VarCurr)|v1480(VarCurr)).
% 298.59/296.82  all VarCurr (v1524(VarCurr)<->v1525(VarCurr)|v1470(VarCurr)).
% 298.59/296.82  all VarCurr (v1525(VarCurr)<->v1526(VarCurr)|v1462(VarCurr)).
% 298.59/296.82  all VarCurr (v1526(VarCurr)<->v1527(VarCurr)|v1454(VarCurr)).
% 298.59/296.82  all VarCurr (v1527(VarCurr)<->v1528(VarCurr)|v1446(VarCurr)).
% 298.59/296.82  all VarCurr (v1528(VarCurr)<->v1529(VarCurr)|v1438(VarCurr)).
% 298.59/296.82  all VarCurr (v1529(VarCurr)<->v1530(VarCurr)|v1430(VarCurr)).
% 298.59/296.82  all VarCurr (v1530(VarCurr)<->v1531(VarCurr)|v1422(VarCurr)).
% 298.59/296.82  all VarCurr (v1531(VarCurr)<->v1532(VarCurr)|v1414(VarCurr)).
% 298.59/296.82  all VarCurr (v1532(VarCurr)<->v1533(VarCurr)|v1406(VarCurr)).
% 298.59/296.82  all VarCurr (v1533(VarCurr)<->v1534(VarCurr)|v1398(VarCurr)).
% 298.59/296.82  all VarCurr (v1534(VarCurr)<->v1535(VarCurr)|v1390(VarCurr)).
% 298.59/296.82  all VarCurr (v1535(VarCurr)<->v1536(VarCurr)|v1382(VarCurr)).
% 298.59/296.82  all VarCurr (v1536(VarCurr)<->v1537(VarCurr)|v1374(VarCurr)).
% 298.59/296.82  all VarCurr (v1537(VarCurr)<->v1296(VarCurr)|v1366(VarCurr)).
% 298.59/296.82  v1294(constB0)<->$F.
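%----Note: v1520..v1537 form a linear OR-reduction tree: v1537 is
%----v1296|v1366, each v153x adds one more disjunct, and v1292 conjoins the
%----resulting 19-way disjunction with v1294 (which starts at $F in constB0).
%----A sketch of the reduction under assumed names:
%      from functools import reduce
%      from operator import or_
%      def or_tree(disjuncts):
%          # disjuncts = [v1296, v1366, v1374, ..., v1500, v1510]
%          return reduce(or_, disjuncts, False)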
% 298.59/296.82  all VarCurr (-v1359(VarCurr)-> (v1510(VarCurr)<->v1514(VarCurr))).
% 298.59/296.82  all VarCurr (v1359(VarCurr)-> (v1510(VarCurr)<->$F)).
% 298.59/296.82  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1514(VarCurr)<->$F)).
% 298.59/296.82  all VarCurr (v1363(VarCurr)-> (v1514(VarCurr)<->v1517(VarCurr))).
% 298.59/296.82  all VarCurr (v1361(VarCurr)-> (v1514(VarCurr)<->v1515(VarCurr))).
% 298.59/296.82  all VarCurr (v1517(VarCurr)<-> (v1518(VarCurr,bitIndex22)<->$F)& (v1518(VarCurr,bitIndex21)<->$F)& (v1518(VarCurr,bitIndex20)<->$F)& (v1518(VarCurr,bitIndex19)<->$F)& (v1518(VarCurr,bitIndex18)<->$F)& (v1518(VarCurr,bitIndex17)<->$F)& (v1518(VarCurr,bitIndex16)<->$F)& (v1518(VarCurr,bitIndex15)<->$F)& (v1518(VarCurr,bitIndex14)<->$T)& (v1518(VarCurr,bitIndex13)<->$T)& (v1518(VarCurr,bitIndex12)<->$T)& (v1518(VarCurr,bitIndex11)<->$F)& (v1518(VarCurr,bitIndex10)<->$T)& (v1518(VarCurr,bitIndex9)<->$F)& (v1518(VarCurr,bitIndex8)<->$F)& (v1518(VarCurr,bitIndex7)<->$T)& (v1518(VarCurr,bitIndex6)<->$F)& (v1518(VarCurr,bitIndex5)<->$F)& (v1518(VarCurr,bitIndex4)<->$T)& (v1518(VarCurr,bitIndex3)<->$F)& (v1518(VarCurr,bitIndex2)<->$F)& (v1518(VarCurr,bitIndex1)<->$F)& (v1518(VarCurr,bitIndex0)<->$T)).
% 298.59/296.82  -b00000000111010010010001(bitIndex22).
% 298.59/296.83  -b00000000111010010010001(bitIndex21).
% 298.59/296.83  -b00000000111010010010001(bitIndex20).
% 298.59/296.83  -b00000000111010010010001(bitIndex19).
% 298.59/296.83  -b00000000111010010010001(bitIndex18).
% 298.59/296.83  -b00000000111010010010001(bitIndex17).
% 298.59/296.83  -b00000000111010010010001(bitIndex16).
% 298.59/296.83  -b00000000111010010010001(bitIndex15).
% 298.59/296.83  b00000000111010010010001(bitIndex14).
% 298.59/296.83  b00000000111010010010001(bitIndex13).
% 298.59/296.83  b00000000111010010010001(bitIndex12).
% 298.59/296.83  -b00000000111010010010001(bitIndex11).
% 298.59/296.83  b00000000111010010010001(bitIndex10).
% 298.59/296.83  -b00000000111010010010001(bitIndex9).
% 298.59/296.83  -b00000000111010010010001(bitIndex8).
% 298.59/296.83  b00000000111010010010001(bitIndex7).
% 298.59/296.83  -b00000000111010010010001(bitIndex6).
% 298.59/296.83  -b00000000111010010010001(bitIndex5).
% 298.59/296.83  b00000000111010010010001(bitIndex4).
% 298.59/296.83  -b00000000111010010010001(bitIndex3).
% 298.59/296.83  -b00000000111010010010001(bitIndex2).
% 298.59/296.83  -b00000000111010010010001(bitIndex1).
% 298.59/296.83  b00000000111010010010001(bitIndex0).
% 298.59/296.83  all VarCurr ((v1518(VarCurr,bitIndex21)<->v1330(VarCurr,bitIndex26))& (v1518(VarCurr,bitIndex20)<->v1330(VarCurr,bitIndex25))& (v1518(VarCurr,bitIndex19)<->v1330(VarCurr,bitIndex24))& (v1518(VarCurr,bitIndex18)<->v1330(VarCurr,bitIndex23))& (v1518(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex22))& (v1518(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex21))& (v1518(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex20))& (v1518(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex19))& (v1518(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex18))& (v1518(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex17))& (v1518(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex16))& (v1518(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex15))& (v1518(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex14))& (v1518(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex13))& (v1518(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex12))& (v1518(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex11))& (v1518(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex10))& (v1518(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex9))& (v1518(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex8))& (v1518(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex7))& (v1518(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex6))& (v1518(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex5))).
% 298.59/296.83  all VarCurr (v1518(VarCurr,bitIndex22)<->$F).
% 298.59/296.83  all VarCurr (v1515(VarCurr)<-> (v1516(VarCurr,bitIndex22)<->$F)& (v1516(VarCurr,bitIndex21)<->$F)& (v1516(VarCurr,bitIndex20)<->$F)& (v1516(VarCurr,bitIndex19)<->$F)& (v1516(VarCurr,bitIndex18)<->$F)& (v1516(VarCurr,bitIndex17)<->$F)& (v1516(VarCurr,bitIndex16)<->$F)& (v1516(VarCurr,bitIndex15)<->$F)& (v1516(VarCurr,bitIndex14)<->$T)& (v1516(VarCurr,bitIndex13)<->$T)& (v1516(VarCurr,bitIndex12)<->$F)& (v1516(VarCurr,bitIndex11)<->$F)& (v1516(VarCurr,bitIndex10)<->$T)& (v1516(VarCurr,bitIndex9)<->$F)& (v1516(VarCurr,bitIndex8)<->$F)& (v1516(VarCurr,bitIndex7)<->$T)& (v1516(VarCurr,bitIndex6)<->$F)& (v1516(VarCurr,bitIndex5)<->$F)& (v1516(VarCurr,bitIndex4)<->$T)& (v1516(VarCurr,bitIndex3)<->$F)& (v1516(VarCurr,bitIndex2)<->$F)& (v1516(VarCurr,bitIndex1)<->$F)& (v1516(VarCurr,bitIndex0)<->$T)).
% 298.59/296.83  -b00000000110010010010001(bitIndex22).
% 298.59/296.83  -b00000000110010010010001(bitIndex21).
% 298.59/296.83  -b00000000110010010010001(bitIndex20).
% 298.59/296.83  -b00000000110010010010001(bitIndex19).
% 298.59/296.83  -b00000000110010010010001(bitIndex18).
% 298.59/296.83  -b00000000110010010010001(bitIndex17).
% 298.59/296.83  -b00000000110010010010001(bitIndex16).
% 298.59/296.83  -b00000000110010010010001(bitIndex15).
% 298.59/296.83  b00000000110010010010001(bitIndex14).
% 298.59/296.83  b00000000110010010010001(bitIndex13).
% 298.59/296.83  -b00000000110010010010001(bitIndex12).
% 298.59/296.83  -b00000000110010010010001(bitIndex11).
% 298.59/296.83  b00000000110010010010001(bitIndex10).
% 298.59/296.83  -b00000000110010010010001(bitIndex9).
% 298.59/296.83  -b00000000110010010010001(bitIndex8).
% 298.59/296.83  b00000000110010010010001(bitIndex7).
% 298.59/296.83  -b00000000110010010010001(bitIndex6).
% 298.59/296.83  -b00000000110010010010001(bitIndex5).
% 298.59/296.83  b00000000110010010010001(bitIndex4).
% 298.59/296.83  -b00000000110010010010001(bitIndex3).
% 298.59/296.83  -b00000000110010010010001(bitIndex2).
% 298.59/296.83  -b00000000110010010010001(bitIndex1).
% 298.59/296.83  b00000000110010010010001(bitIndex0).
% 298.59/296.83  all VarCurr ((v1516(VarCurr,bitIndex21)<->v1330(VarCurr,bitIndex26))& (v1516(VarCurr,bitIndex20)<->v1330(VarCurr,bitIndex25))& (v1516(VarCurr,bitIndex19)<->v1330(VarCurr,bitIndex24))& (v1516(VarCurr,bitIndex18)<->v1330(VarCurr,bitIndex23))& (v1516(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex22))& (v1516(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex21))& (v1516(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex20))& (v1516(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex19))& (v1516(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex18))& (v1516(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex17))& (v1516(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex16))& (v1516(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex15))& (v1516(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex14))& (v1516(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex13))& (v1516(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex12))& (v1516(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex11))& (v1516(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex10))& (v1516(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex9))& (v1516(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex8))& (v1516(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex7))& (v1516(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex6))& (v1516(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex5))).
% 298.59/296.84  all VarCurr (v1516(VarCurr,bitIndex22)<->$F).
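%----Note: v1510 is a gated equality comparator. With v1359 low, v1361 or
%----v1363 selects a branch, and the 23-bit vector v1516/v1518 (bits 5..26 of
%----v1330, with bitIndex22 tied to $F) is compared against a constant whose
%----bits are spelled out one axiom per index; the predicate name encodes the
%----value, bitIndex0 being the rightmost binary digit. Decoding sketch
%----(function names are ours):
%      def const_bits(name):
%          # 'b00000000111010010010001' -> [bitIndex0, bitIndex1, ...]
%          return [d == '1' for d in reversed(name.lstrip('b'))]
%      def equals_const(vec, name):
%          # e.g. equals_const(v1518, 'b00000000111010010010001') models v1517
%          return vec == const_bits(name)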
% 298.59/296.84  all VarCurr (-v1359(VarCurr)-> (v1500(VarCurr)<->v1504(VarCurr))).
% 298.59/296.84  all VarCurr (v1359(VarCurr)-> (v1500(VarCurr)<->$F)).
% 298.59/296.84  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1504(VarCurr)<->$F)).
% 298.59/296.84  all VarCurr (v1363(VarCurr)-> (v1504(VarCurr)<->v1507(VarCurr))).
% 298.59/296.84  all VarCurr (v1361(VarCurr)-> (v1504(VarCurr)<->v1505(VarCurr))).
% 298.59/296.84  all VarCurr (v1507(VarCurr)<-> (v1508(VarCurr,bitIndex23)<->$F)& (v1508(VarCurr,bitIndex22)<->$F)& (v1508(VarCurr,bitIndex21)<->$F)& (v1508(VarCurr,bitIndex20)<->$F)& (v1508(VarCurr,bitIndex19)<->$F)& (v1508(VarCurr,bitIndex18)<->$F)& (v1508(VarCurr,bitIndex17)<->$F)& (v1508(VarCurr,bitIndex16)<->$F)& (v1508(VarCurr,bitIndex15)<->$T)& (v1508(VarCurr,bitIndex14)<->$T)& (v1508(VarCurr,bitIndex13)<->$T)& (v1508(VarCurr,bitIndex12)<->$F)& (v1508(VarCurr,bitIndex11)<->$T)& (v1508(VarCurr,bitIndex10)<->$F)& (v1508(VarCurr,bitIndex9)<->$F)& (v1508(VarCurr,bitIndex8)<->$T)& (v1508(VarCurr,bitIndex7)<->$F)& (v1508(VarCurr,bitIndex6)<->$F)& (v1508(VarCurr,bitIndex5)<->$T)& (v1508(VarCurr,bitIndex4)<->$F)& (v1508(VarCurr,bitIndex3)<->$F)& (v1508(VarCurr,bitIndex2)<->$F)& (v1508(VarCurr,bitIndex1)<->$F)& (v1508(VarCurr,bitIndex0)<->$F)).
% 298.59/296.84  -b000000001110100100100000(bitIndex23).
% 298.59/296.84  -b000000001110100100100000(bitIndex22).
% 298.59/296.84  -b000000001110100100100000(bitIndex21).
% 298.59/296.84  -b000000001110100100100000(bitIndex20).
% 298.59/296.84  -b000000001110100100100000(bitIndex19).
% 298.59/296.84  -b000000001110100100100000(bitIndex18).
% 298.59/296.84  -b000000001110100100100000(bitIndex17).
% 298.59/296.84  -b000000001110100100100000(bitIndex16).
% 298.59/296.84  b000000001110100100100000(bitIndex15).
% 298.59/296.84  b000000001110100100100000(bitIndex14).
% 298.59/296.84  b000000001110100100100000(bitIndex13).
% 298.59/296.84  -b000000001110100100100000(bitIndex12).
% 298.59/296.84  b000000001110100100100000(bitIndex11).
% 298.59/296.84  -b000000001110100100100000(bitIndex10).
% 298.59/296.84  -b000000001110100100100000(bitIndex9).
% 298.59/296.84  b000000001110100100100000(bitIndex8).
% 298.59/296.84  -b000000001110100100100000(bitIndex7).
% 298.59/296.84  -b000000001110100100100000(bitIndex6).
% 298.59/296.84  b000000001110100100100000(bitIndex5).
% 298.59/296.84  -b000000001110100100100000(bitIndex4).
% 298.59/296.84  -b000000001110100100100000(bitIndex3).
% 298.59/296.84  -b000000001110100100100000(bitIndex2).
% 298.59/296.84  -b000000001110100100100000(bitIndex1).
% 298.59/296.84  -b000000001110100100100000(bitIndex0).
% 298.59/296.84  all VarCurr ((v1508(VarCurr,bitIndex22)<->v1330(VarCurr,bitIndex26))& (v1508(VarCurr,bitIndex21)<->v1330(VarCurr,bitIndex25))& (v1508(VarCurr,bitIndex20)<->v1330(VarCurr,bitIndex24))& (v1508(VarCurr,bitIndex19)<->v1330(VarCurr,bitIndex23))& (v1508(VarCurr,bitIndex18)<->v1330(VarCurr,bitIndex22))& (v1508(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex21))& (v1508(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex20))& (v1508(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex19))& (v1508(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex18))& (v1508(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex17))& (v1508(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex16))& (v1508(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex15))& (v1508(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex14))& (v1508(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex13))& (v1508(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex12))& (v1508(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex11))& (v1508(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex10))& (v1508(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex9))& (v1508(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex8))& (v1508(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex7))& (v1508(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex6))& (v1508(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex5))& (v1508(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex4))).
% 298.66/296.84  all VarCurr (v1508(VarCurr,bitIndex23)<->$F).
% 298.66/296.84  all VarCurr (v1505(VarCurr)<-> (v1506(VarCurr,bitIndex23)<->$F)& (v1506(VarCurr,bitIndex22)<->$F)& (v1506(VarCurr,bitIndex21)<->$F)& (v1506(VarCurr,bitIndex20)<->$F)& (v1506(VarCurr,bitIndex19)<->$F)& (v1506(VarCurr,bitIndex18)<->$F)& (v1506(VarCurr,bitIndex17)<->$F)& (v1506(VarCurr,bitIndex16)<->$F)& (v1506(VarCurr,bitIndex15)<->$T)& (v1506(VarCurr,bitIndex14)<->$T)& (v1506(VarCurr,bitIndex13)<->$F)& (v1506(VarCurr,bitIndex12)<->$F)& (v1506(VarCurr,bitIndex11)<->$T)& (v1506(VarCurr,bitIndex10)<->$F)& (v1506(VarCurr,bitIndex9)<->$F)& (v1506(VarCurr,bitIndex8)<->$T)& (v1506(VarCurr,bitIndex7)<->$F)& (v1506(VarCurr,bitIndex6)<->$F)& (v1506(VarCurr,bitIndex5)<->$T)& (v1506(VarCurr,bitIndex4)<->$F)& (v1506(VarCurr,bitIndex3)<->$F)& (v1506(VarCurr,bitIndex2)<->$F)& (v1506(VarCurr,bitIndex1)<->$F)& (v1506(VarCurr,bitIndex0)<->$F)).
% 298.66/296.84  -b000000001100100100100000(bitIndex23).
% 298.66/296.84  -b000000001100100100100000(bitIndex22).
% 298.66/296.84  -b000000001100100100100000(bitIndex21).
% 298.66/296.84  -b000000001100100100100000(bitIndex20).
% 298.66/296.84  -b000000001100100100100000(bitIndex19).
% 298.66/296.84  -b000000001100100100100000(bitIndex18).
% 298.66/296.84  -b000000001100100100100000(bitIndex17).
% 298.66/296.84  -b000000001100100100100000(bitIndex16).
% 298.66/296.84  b000000001100100100100000(bitIndex15).
% 298.66/296.84  b000000001100100100100000(bitIndex14).
% 298.66/296.84  -b000000001100100100100000(bitIndex13).
% 298.66/296.84  -b000000001100100100100000(bitIndex12).
% 298.66/296.84  b000000001100100100100000(bitIndex11).
% 298.66/296.84  -b000000001100100100100000(bitIndex10).
% 298.66/296.84  -b000000001100100100100000(bitIndex9).
% 298.66/296.84  b000000001100100100100000(bitIndex8).
% 298.66/296.84  -b000000001100100100100000(bitIndex7).
% 298.66/296.84  -b000000001100100100100000(bitIndex6).
% 298.66/296.84  b000000001100100100100000(bitIndex5).
% 298.66/296.84  -b000000001100100100100000(bitIndex4).
% 298.66/296.84  -b000000001100100100100000(bitIndex3).
% 298.66/296.84  -b000000001100100100100000(bitIndex2).
% 298.66/296.84  -b000000001100100100100000(bitIndex1).
% 298.66/296.84  -b000000001100100100100000(bitIndex0).
% 298.66/296.84  all VarCurr ((v1506(VarCurr,bitIndex22)<->v1330(VarCurr,bitIndex26))& (v1506(VarCurr,bitIndex21)<->v1330(VarCurr,bitIndex25))& (v1506(VarCurr,bitIndex20)<->v1330(VarCurr,bitIndex24))& (v1506(VarCurr,bitIndex19)<->v1330(VarCurr,bitIndex23))& (v1506(VarCurr,bitIndex18)<->v1330(VarCurr,bitIndex22))& (v1506(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex21))& (v1506(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex20))& (v1506(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex19))& (v1506(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex18))& (v1506(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex17))& (v1506(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex16))& (v1506(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex15))& (v1506(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex14))& (v1506(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex13))& (v1506(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex12))& (v1506(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex11))& (v1506(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex10))& (v1506(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex9))& (v1506(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex8))& (v1506(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex7))& (v1506(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex6))& (v1506(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex5))& (v1506(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex4))).
% 298.66/296.84  all VarCurr (v1506(VarCurr,bitIndex23)<->$F).
% 298.66/296.84  all VarCurr (-v1359(VarCurr)-> (v1490(VarCurr)<->v1494(VarCurr))).
% 298.66/296.84  all VarCurr (v1359(VarCurr)-> (v1490(VarCurr)<->$F)).
% 298.66/296.84  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1494(VarCurr)<->$F)).
% 298.66/296.84  all VarCurr (v1363(VarCurr)-> (v1494(VarCurr)<->v1497(VarCurr))).
% 298.66/296.84  all VarCurr (v1361(VarCurr)-> (v1494(VarCurr)<->v1495(VarCurr))).
% 298.66/296.84  all VarCurr (v1497(VarCurr)<-> (v1498(VarCurr,bitIndex18)<->$F)& (v1498(VarCurr,bitIndex17)<->$F)& (v1498(VarCurr,bitIndex16)<->$F)& (v1498(VarCurr,bitIndex15)<->$F)& (v1498(VarCurr,bitIndex14)<->$F)& (v1498(VarCurr,bitIndex13)<->$F)& (v1498(VarCurr,bitIndex12)<->$F)& (v1498(VarCurr,bitIndex11)<->$F)& (v1498(VarCurr,bitIndex10)<->$T)& (v1498(VarCurr,bitIndex9)<->$T)& (v1498(VarCurr,bitIndex8)<->$T)& (v1498(VarCurr,bitIndex7)<->$F)& (v1498(VarCurr,bitIndex6)<->$T)& (v1498(VarCurr,bitIndex5)<->$F)& (v1498(VarCurr,bitIndex4)<->$F)& (v1498(VarCurr,bitIndex3)<->$T)& (v1498(VarCurr,bitIndex2)<->$F)& (v1498(VarCurr,bitIndex1)<->$F)& (v1498(VarCurr,bitIndex0)<->$F)).
% 298.66/296.85  -b0000000011101001000(bitIndex18).
% 298.66/296.85  -b0000000011101001000(bitIndex17).
% 298.66/296.85  -b0000000011101001000(bitIndex16).
% 298.66/296.85  -b0000000011101001000(bitIndex15).
% 298.66/296.85  -b0000000011101001000(bitIndex14).
% 298.66/296.85  -b0000000011101001000(bitIndex13).
% 298.66/296.85  -b0000000011101001000(bitIndex12).
% 298.66/296.85  -b0000000011101001000(bitIndex11).
% 298.66/296.85  b0000000011101001000(bitIndex10).
% 298.66/296.85  b0000000011101001000(bitIndex9).
% 298.66/296.85  b0000000011101001000(bitIndex8).
% 298.66/296.85  -b0000000011101001000(bitIndex7).
% 298.66/296.85  b0000000011101001000(bitIndex6).
% 298.66/296.85  -b0000000011101001000(bitIndex5).
% 298.66/296.85  -b0000000011101001000(bitIndex4).
% 298.66/296.85  b0000000011101001000(bitIndex3).
% 298.66/296.85  -b0000000011101001000(bitIndex2).
% 298.66/296.85  -b0000000011101001000(bitIndex1).
% 298.66/296.85  -b0000000011101001000(bitIndex0).
% 298.66/296.85  all VarCurr ((v1498(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex26))& (v1498(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex25))& (v1498(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex24))& (v1498(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex23))& (v1498(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex22))& (v1498(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex21))& (v1498(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex20))& (v1498(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex19))& (v1498(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex18))& (v1498(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex17))& (v1498(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex16))& (v1498(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex15))& (v1498(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex14))& (v1498(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex13))& (v1498(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex12))& (v1498(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex11))& (v1498(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex10))& (v1498(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex9))).
% 298.66/296.85  all VarCurr (v1498(VarCurr,bitIndex18)<->$F).
% 298.66/296.85  all VarCurr (v1495(VarCurr)<-> (v1496(VarCurr,bitIndex18)<->$F)& (v1496(VarCurr,bitIndex17)<->$F)& (v1496(VarCurr,bitIndex16)<->$F)& (v1496(VarCurr,bitIndex15)<->$F)& (v1496(VarCurr,bitIndex14)<->$F)& (v1496(VarCurr,bitIndex13)<->$F)& (v1496(VarCurr,bitIndex12)<->$F)& (v1496(VarCurr,bitIndex11)<->$F)& (v1496(VarCurr,bitIndex10)<->$T)& (v1496(VarCurr,bitIndex9)<->$T)& (v1496(VarCurr,bitIndex8)<->$F)& (v1496(VarCurr,bitIndex7)<->$F)& (v1496(VarCurr,bitIndex6)<->$T)& (v1496(VarCurr,bitIndex5)<->$F)& (v1496(VarCurr,bitIndex4)<->$F)& (v1496(VarCurr,bitIndex3)<->$T)& (v1496(VarCurr,bitIndex2)<->$F)& (v1496(VarCurr,bitIndex1)<->$F)& (v1496(VarCurr,bitIndex0)<->$F)).
% 298.66/296.85  -b0000000011001001000(bitIndex18).
% 298.66/296.85  -b0000000011001001000(bitIndex17).
% 298.66/296.85  -b0000000011001001000(bitIndex16).
% 298.66/296.85  -b0000000011001001000(bitIndex15).
% 298.66/296.85  -b0000000011001001000(bitIndex14).
% 298.66/296.85  -b0000000011001001000(bitIndex13).
% 298.66/296.85  -b0000000011001001000(bitIndex12).
% 298.66/296.85  -b0000000011001001000(bitIndex11).
% 298.66/296.85  b0000000011001001000(bitIndex10).
% 298.66/296.85  b0000000011001001000(bitIndex9).
% 298.66/296.85  -b0000000011001001000(bitIndex8).
% 298.66/296.85  -b0000000011001001000(bitIndex7).
% 298.66/296.85  b0000000011001001000(bitIndex6).
% 298.66/296.85  -b0000000011001001000(bitIndex5).
% 298.66/296.85  -b0000000011001001000(bitIndex4).
% 298.66/296.85  b0000000011001001000(bitIndex3).
% 298.66/296.85  -b0000000011001001000(bitIndex2).
% 298.66/296.85  -b0000000011001001000(bitIndex1).
% 298.66/296.85  -b0000000011001001000(bitIndex0).
% 298.66/296.85  all VarCurr ((v1496(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex26))& (v1496(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex25))& (v1496(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex24))& (v1496(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex23))& (v1496(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex22))& (v1496(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex21))& (v1496(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex20))& (v1496(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex19))& (v1496(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex18))& (v1496(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex17))& (v1496(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex16))& (v1496(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex15))& (v1496(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex14))& (v1496(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex13))& (v1496(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex12))& (v1496(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex11))& (v1496(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex10))& (v1496(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex9))).
% 298.66/296.86  all VarCurr (v1496(VarCurr,bitIndex18)<->$F).
% 298.66/296.86  all VarCurr (-v1359(VarCurr)-> (v1480(VarCurr)<->v1484(VarCurr))).
% 298.66/296.86  all VarCurr (v1359(VarCurr)-> (v1480(VarCurr)<->$F)).
% 298.66/296.86  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1484(VarCurr)<->$F)).
% 298.66/296.86  all VarCurr (v1363(VarCurr)-> (v1484(VarCurr)<->v1487(VarCurr))).
% 298.66/296.86  all VarCurr (v1361(VarCurr)-> (v1484(VarCurr)<->v1485(VarCurr))).
% 298.66/296.86  all VarCurr (v1487(VarCurr)<-> (v1488(VarCurr,bitIndex21)<->$F)& (v1488(VarCurr,bitIndex20)<->$F)& (v1488(VarCurr,bitIndex19)<->$F)& (v1488(VarCurr,bitIndex18)<->$F)& (v1488(VarCurr,bitIndex17)<->$F)& (v1488(VarCurr,bitIndex16)<->$F)& (v1488(VarCurr,bitIndex15)<->$F)& (v1488(VarCurr,bitIndex14)<->$F)& (v1488(VarCurr,bitIndex13)<->$T)& (v1488(VarCurr,bitIndex12)<->$T)& (v1488(VarCurr,bitIndex11)<->$T)& (v1488(VarCurr,bitIndex10)<->$F)& (v1488(VarCurr,bitIndex9)<->$T)& (v1488(VarCurr,bitIndex8)<->$F)& (v1488(VarCurr,bitIndex7)<->$F)& (v1488(VarCurr,bitIndex6)<->$F)& (v1488(VarCurr,bitIndex5)<->$T)& (v1488(VarCurr,bitIndex4)<->$T)& (v1488(VarCurr,bitIndex3)<->$T)& (v1488(VarCurr,bitIndex2)<->$F)& (v1488(VarCurr,bitIndex1)<->$F)& (v1488(VarCurr,bitIndex0)<->$F)).
% 298.66/296.86  -b0000000011101000111000(bitIndex21).
% 298.66/296.86  -b0000000011101000111000(bitIndex20).
% 298.66/296.86  -b0000000011101000111000(bitIndex19).
% 298.66/296.86  -b0000000011101000111000(bitIndex18).
% 298.66/296.86  -b0000000011101000111000(bitIndex17).
% 298.66/296.86  -b0000000011101000111000(bitIndex16).
% 298.66/296.86  -b0000000011101000111000(bitIndex15).
% 298.66/296.86  -b0000000011101000111000(bitIndex14).
% 298.66/296.86  b0000000011101000111000(bitIndex13).
% 298.66/296.86  b0000000011101000111000(bitIndex12).
% 298.66/296.86  b0000000011101000111000(bitIndex11).
% 298.66/296.86  -b0000000011101000111000(bitIndex10).
% 298.66/296.86  b0000000011101000111000(bitIndex9).
% 298.66/296.86  -b0000000011101000111000(bitIndex8).
% 298.66/296.86  -b0000000011101000111000(bitIndex7).
% 298.66/296.86  -b0000000011101000111000(bitIndex6).
% 298.66/296.86  b0000000011101000111000(bitIndex5).
% 298.66/296.86  b0000000011101000111000(bitIndex4).
% 298.66/296.86  b0000000011101000111000(bitIndex3).
% 298.66/296.86  -b0000000011101000111000(bitIndex2).
% 298.66/296.86  -b0000000011101000111000(bitIndex1).
% 298.66/296.86  -b0000000011101000111000(bitIndex0).
% 298.66/296.86  all VarCurr ((v1488(VarCurr,bitIndex20)<->v1330(VarCurr,bitIndex26))& (v1488(VarCurr,bitIndex19)<->v1330(VarCurr,bitIndex25))& (v1488(VarCurr,bitIndex18)<->v1330(VarCurr,bitIndex24))& (v1488(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex23))& (v1488(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex22))& (v1488(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex21))& (v1488(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex20))& (v1488(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex19))& (v1488(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex18))& (v1488(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex17))& (v1488(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex16))& (v1488(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex15))& (v1488(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex14))& (v1488(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex13))& (v1488(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex12))& (v1488(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex11))& (v1488(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex10))& (v1488(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex9))& (v1488(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex8))& (v1488(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex7))& (v1488(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex6))).
% 298.66/296.86  all VarCurr (v1488(VarCurr,bitIndex21)<->$F).
% 298.66/296.86  all VarCurr (v1485(VarCurr)<-> (v1486(VarCurr,bitIndex21)<->$F)& (v1486(VarCurr,bitIndex20)<->$F)& (v1486(VarCurr,bitIndex19)<->$F)& (v1486(VarCurr,bitIndex18)<->$F)& (v1486(VarCurr,bitIndex17)<->$F)& (v1486(VarCurr,bitIndex16)<->$F)& (v1486(VarCurr,bitIndex15)<->$F)& (v1486(VarCurr,bitIndex14)<->$F)& (v1486(VarCurr,bitIndex13)<->$T)& (v1486(VarCurr,bitIndex12)<->$T)& (v1486(VarCurr,bitIndex11)<->$F)& (v1486(VarCurr,bitIndex10)<->$F)& (v1486(VarCurr,bitIndex9)<->$T)& (v1486(VarCurr,bitIndex8)<->$F)& (v1486(VarCurr,bitIndex7)<->$F)& (v1486(VarCurr,bitIndex6)<->$F)& (v1486(VarCurr,bitIndex5)<->$T)& (v1486(VarCurr,bitIndex4)<->$T)& (v1486(VarCurr,bitIndex3)<->$T)& (v1486(VarCurr,bitIndex2)<->$F)& (v1486(VarCurr,bitIndex1)<->$F)& (v1486(VarCurr,bitIndex0)<->$F)).
% 298.66/296.87  -b0000000011001000111000(bitIndex21).
% 298.66/296.87  -b0000000011001000111000(bitIndex20).
% 298.66/296.87  -b0000000011001000111000(bitIndex19).
% 298.66/296.87  -b0000000011001000111000(bitIndex18).
% 298.66/296.87  -b0000000011001000111000(bitIndex17).
% 298.66/296.87  -b0000000011001000111000(bitIndex16).
% 298.66/296.87  -b0000000011001000111000(bitIndex15).
% 298.66/296.87  -b0000000011001000111000(bitIndex14).
% 298.66/296.87  b0000000011001000111000(bitIndex13).
% 298.66/296.87  b0000000011001000111000(bitIndex12).
% 298.66/296.87  -b0000000011001000111000(bitIndex11).
% 298.66/296.87  -b0000000011001000111000(bitIndex10).
% 298.66/296.87  b0000000011001000111000(bitIndex9).
% 298.66/296.87  -b0000000011001000111000(bitIndex8).
% 298.66/296.87  -b0000000011001000111000(bitIndex7).
% 298.66/296.87  -b0000000011001000111000(bitIndex6).
% 298.66/296.87  b0000000011001000111000(bitIndex5).
% 298.66/296.87  b0000000011001000111000(bitIndex4).
% 298.66/296.87  b0000000011001000111000(bitIndex3).
% 298.66/296.87  -b0000000011001000111000(bitIndex2).
% 298.66/296.87  -b0000000011001000111000(bitIndex1).
% 298.66/296.87  -b0000000011001000111000(bitIndex0).
% 298.66/296.87  all VarCurr ((v1486(VarCurr,bitIndex20)<->v1330(VarCurr,bitIndex26))& (v1486(VarCurr,bitIndex19)<->v1330(VarCurr,bitIndex25))& (v1486(VarCurr,bitIndex18)<->v1330(VarCurr,bitIndex24))& (v1486(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex23))& (v1486(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex22))& (v1486(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex21))& (v1486(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex20))& (v1486(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex19))& (v1486(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex18))& (v1486(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex17))& (v1486(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex16))& (v1486(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex15))& (v1486(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex14))& (v1486(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex13))& (v1486(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex12))& (v1486(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex11))& (v1486(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex10))& (v1486(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex9))& (v1486(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex8))& (v1486(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex7))& (v1486(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex6))).
% 298.66/296.87  all VarCurr (v1486(VarCurr,bitIndex21)<->$F).
% 298.66/296.87  all VarCurr (-v1359(VarCurr)-> (v1470(VarCurr)<->v1474(VarCurr))).
% 298.66/296.87  all VarCurr (v1359(VarCurr)-> (v1470(VarCurr)<->$F)).
% 298.66/296.87  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1474(VarCurr)<->$F)).
% 298.66/296.87  all VarCurr (v1363(VarCurr)-> (v1474(VarCurr)<->v1477(VarCurr))).
% 298.66/296.87  all VarCurr (v1361(VarCurr)-> (v1474(VarCurr)<->v1475(VarCurr))).
% 298.66/296.87  all VarCurr (v1477(VarCurr)<-> (v1478(VarCurr,bitIndex21)<->$F)& (v1478(VarCurr,bitIndex20)<->$F)& (v1478(VarCurr,bitIndex19)<->$F)& (v1478(VarCurr,bitIndex18)<->$F)& (v1478(VarCurr,bitIndex17)<->$F)& (v1478(VarCurr,bitIndex16)<->$F)& (v1478(VarCurr,bitIndex15)<->$F)& (v1478(VarCurr,bitIndex14)<->$F)& (v1478(VarCurr,bitIndex13)<->$T)& (v1478(VarCurr,bitIndex12)<->$T)& (v1478(VarCurr,bitIndex11)<->$T)& (v1478(VarCurr,bitIndex10)<->$F)& (v1478(VarCurr,bitIndex9)<->$T)& (v1478(VarCurr,bitIndex8)<->$F)& (v1478(VarCurr,bitIndex7)<->$F)& (v1478(VarCurr,bitIndex6)<->$F)& (v1478(VarCurr,bitIndex5)<->$T)& (v1478(VarCurr,bitIndex4)<->$T)& (v1478(VarCurr,bitIndex3)<->$F)& (v1478(VarCurr,bitIndex2)<->$F)& (v1478(VarCurr,bitIndex1)<->$F)& (v1478(VarCurr,bitIndex0)<->$F)).
% 298.66/296.87  -b0000000011101000110000(bitIndex21).
% 298.66/296.87  -b0000000011101000110000(bitIndex20).
% 298.66/296.87  -b0000000011101000110000(bitIndex19).
% 298.66/296.87  -b0000000011101000110000(bitIndex18).
% 298.66/296.87  -b0000000011101000110000(bitIndex17).
% 298.66/296.87  -b0000000011101000110000(bitIndex16).
% 298.66/296.87  -b0000000011101000110000(bitIndex15).
% 298.66/296.87  -b0000000011101000110000(bitIndex14).
% 298.66/296.87  b0000000011101000110000(bitIndex13).
% 298.66/296.87  b0000000011101000110000(bitIndex12).
% 298.66/296.87  b0000000011101000110000(bitIndex11).
% 298.66/296.87  -b0000000011101000110000(bitIndex10).
% 298.66/296.87  b0000000011101000110000(bitIndex9).
% 298.66/296.87  -b0000000011101000110000(bitIndex8).
% 298.66/296.87  -b0000000011101000110000(bitIndex7).
% 298.66/296.87  -b0000000011101000110000(bitIndex6).
% 298.66/296.87  b0000000011101000110000(bitIndex5).
% 298.66/296.87  b0000000011101000110000(bitIndex4).
% 298.66/296.87  -b0000000011101000110000(bitIndex3).
% 298.66/296.87  -b0000000011101000110000(bitIndex2).
% 298.66/296.87  -b0000000011101000110000(bitIndex1).
% 298.66/296.87  -b0000000011101000110000(bitIndex0).
% 298.66/296.87  all VarCurr ((v1478(VarCurr,bitIndex20)<->v1330(VarCurr,bitIndex26))& (v1478(VarCurr,bitIndex19)<->v1330(VarCurr,bitIndex25))& (v1478(VarCurr,bitIndex18)<->v1330(VarCurr,bitIndex24))& (v1478(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex23))& (v1478(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex22))& (v1478(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex21))& (v1478(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex20))& (v1478(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex19))& (v1478(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex18))& (v1478(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex17))& (v1478(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex16))& (v1478(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex15))& (v1478(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex14))& (v1478(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex13))& (v1478(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex12))& (v1478(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex11))& (v1478(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex10))& (v1478(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex9))& (v1478(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex8))& (v1478(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex7))& (v1478(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex6))).
% 298.66/296.87  all VarCurr (v1478(VarCurr,bitIndex21)<->$F).
% 298.66/296.87  all VarCurr (v1475(VarCurr)<-> (v1476(VarCurr,bitIndex21)<->$F)& (v1476(VarCurr,bitIndex20)<->$F)& (v1476(VarCurr,bitIndex19)<->$F)& (v1476(VarCurr,bitIndex18)<->$F)& (v1476(VarCurr,bitIndex17)<->$F)& (v1476(VarCurr,bitIndex16)<->$F)& (v1476(VarCurr,bitIndex15)<->$F)& (v1476(VarCurr,bitIndex14)<->$F)& (v1476(VarCurr,bitIndex13)<->$T)& (v1476(VarCurr,bitIndex12)<->$T)& (v1476(VarCurr,bitIndex11)<->$F)& (v1476(VarCurr,bitIndex10)<->$F)& (v1476(VarCurr,bitIndex9)<->$T)& (v1476(VarCurr,bitIndex8)<->$F)& (v1476(VarCurr,bitIndex7)<->$F)& (v1476(VarCurr,bitIndex6)<->$F)& (v1476(VarCurr,bitIndex5)<->$T)& (v1476(VarCurr,bitIndex4)<->$T)& (v1476(VarCurr,bitIndex3)<->$F)& (v1476(VarCurr,bitIndex2)<->$F)& (v1476(VarCurr,bitIndex1)<->$F)& (v1476(VarCurr,bitIndex0)<->$F)).
% 298.66/296.87  -b0000000011001000110000(bitIndex21).
% 298.66/296.87  -b0000000011001000110000(bitIndex20).
% 298.66/296.87  -b0000000011001000110000(bitIndex19).
% 298.66/296.87  -b0000000011001000110000(bitIndex18).
% 298.66/296.87  -b0000000011001000110000(bitIndex17).
% 298.66/296.87  -b0000000011001000110000(bitIndex16).
% 298.66/296.87  -b0000000011001000110000(bitIndex15).
% 298.66/296.87  -b0000000011001000110000(bitIndex14).
% 298.66/296.87  b0000000011001000110000(bitIndex13).
% 298.66/296.87  b0000000011001000110000(bitIndex12).
% 298.66/296.87  -b0000000011001000110000(bitIndex11).
% 298.66/296.87  -b0000000011001000110000(bitIndex10).
% 298.66/296.87  b0000000011001000110000(bitIndex9).
% 298.66/296.87  -b0000000011001000110000(bitIndex8).
% 298.66/296.87  -b0000000011001000110000(bitIndex7).
% 298.66/296.87  -b0000000011001000110000(bitIndex6).
% 298.66/296.87  b0000000011001000110000(bitIndex5).
% 298.66/296.87  b0000000011001000110000(bitIndex4).
% 298.66/296.87  -b0000000011001000110000(bitIndex3).
% 298.66/296.87  -b0000000011001000110000(bitIndex2).
% 298.66/296.87  -b0000000011001000110000(bitIndex1).
% 298.66/296.87  -b0000000011001000110000(bitIndex0).
% 298.66/296.87  all VarCurr ((v1476(VarCurr,bitIndex20)<->v1330(VarCurr,bitIndex26))& (v1476(VarCurr,bitIndex19)<->v1330(VarCurr,bitIndex25))& (v1476(VarCurr,bitIndex18)<->v1330(VarCurr,bitIndex24))& (v1476(VarCurr,bitIndex17)<->v1330(VarCurr,bitIndex23))& (v1476(VarCurr,bitIndex16)<->v1330(VarCurr,bitIndex22))& (v1476(VarCurr,bitIndex15)<->v1330(VarCurr,bitIndex21))& (v1476(VarCurr,bitIndex14)<->v1330(VarCurr,bitIndex20))& (v1476(VarCurr,bitIndex13)<->v1330(VarCurr,bitIndex19))& (v1476(VarCurr,bitIndex12)<->v1330(VarCurr,bitIndex18))& (v1476(VarCurr,bitIndex11)<->v1330(VarCurr,bitIndex17))& (v1476(VarCurr,bitIndex10)<->v1330(VarCurr,bitIndex16))& (v1476(VarCurr,bitIndex9)<->v1330(VarCurr,bitIndex15))& (v1476(VarCurr,bitIndex8)<->v1330(VarCurr,bitIndex14))& (v1476(VarCurr,bitIndex7)<->v1330(VarCurr,bitIndex13))& (v1476(VarCurr,bitIndex6)<->v1330(VarCurr,bitIndex12))& (v1476(VarCurr,bitIndex5)<->v1330(VarCurr,bitIndex11))& (v1476(VarCurr,bitIndex4)<->v1330(VarCurr,bitIndex10))& (v1476(VarCurr,bitIndex3)<->v1330(VarCurr,bitIndex9))& (v1476(VarCurr,bitIndex2)<->v1330(VarCurr,bitIndex8))& (v1476(VarCurr,bitIndex1)<->v1330(VarCurr,bitIndex7))& (v1476(VarCurr,bitIndex0)<->v1330(VarCurr,bitIndex6))).
% 298.70/296.88  all VarCurr (v1476(VarCurr,bitIndex21)<->$F).
% 298.70/296.88  all VarCurr (-v1359(VarCurr)-> (v1462(VarCurr)<->v1466(VarCurr))).
% 298.70/296.88  all VarCurr (v1359(VarCurr)-> (v1462(VarCurr)<->$F)).
% 298.70/296.88  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1466(VarCurr)<->$F)).
% 298.70/296.88  all VarCurr (v1363(VarCurr)-> (v1466(VarCurr)<->v1468(VarCurr))).
% 298.70/296.88  all VarCurr (v1361(VarCurr)-> (v1466(VarCurr)<->v1467(VarCurr))).
% 298.70/296.88  all VarCurr (v1468(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$T)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$T)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.70/296.88  -b000000011101000010000000010(bitIndex26).
% 298.70/296.88  -b000000011101000010000000010(bitIndex25).
% 298.70/296.88  -b000000011101000010000000010(bitIndex24).
% 298.70/296.88  -b000000011101000010000000010(bitIndex23).
% 298.70/296.88  -b000000011101000010000000010(bitIndex22).
% 298.70/296.88  -b000000011101000010000000010(bitIndex21).
% 298.70/296.88  -b000000011101000010000000010(bitIndex20).
% 298.70/296.88  b000000011101000010000000010(bitIndex19).
% 298.70/296.88  b000000011101000010000000010(bitIndex18).
% 298.70/296.88  b000000011101000010000000010(bitIndex17).
% 298.70/296.88  -b000000011101000010000000010(bitIndex16).
% 298.70/296.88  b000000011101000010000000010(bitIndex15).
% 298.70/296.88  -b000000011101000010000000010(bitIndex14).
% 298.70/296.88  -b000000011101000010000000010(bitIndex13).
% 298.70/296.88  -b000000011101000010000000010(bitIndex12).
% 298.70/296.88  -b000000011101000010000000010(bitIndex11).
% 298.70/296.88  b000000011101000010000000010(bitIndex10).
% 298.70/296.88  -b000000011101000010000000010(bitIndex9).
% 298.70/296.88  -b000000011101000010000000010(bitIndex8).
% 298.70/296.88  -b000000011101000010000000010(bitIndex7).
% 298.70/296.88  -b000000011101000010000000010(bitIndex6).
% 298.70/296.88  -b000000011101000010000000010(bitIndex5).
% 298.70/296.88  -b000000011101000010000000010(bitIndex4).
% 298.70/296.88  -b000000011101000010000000010(bitIndex3).
% 298.70/296.88  -b000000011101000010000000010(bitIndex2).
% 298.70/296.88  b000000011101000010000000010(bitIndex1).
% 298.70/296.88  -b000000011101000010000000010(bitIndex0).
% 298.70/296.88  all VarCurr (v1467(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$T)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$T)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.70/296.88  -b000000011001000010000000010(bitIndex26).
% 298.70/296.88  -b000000011001000010000000010(bitIndex25).
% 298.70/296.88  -b000000011001000010000000010(bitIndex24).
% 298.70/296.88  -b000000011001000010000000010(bitIndex23).
% 298.70/296.88  -b000000011001000010000000010(bitIndex22).
% 298.70/296.88  -b000000011001000010000000010(bitIndex21).
% 298.70/296.88  -b000000011001000010000000010(bitIndex20).
% 298.70/296.88  b000000011001000010000000010(bitIndex19).
% 298.70/296.88  b000000011001000010000000010(bitIndex18).
% 298.70/296.88  -b000000011001000010000000010(bitIndex17).
% 298.70/296.88  -b000000011001000010000000010(bitIndex16).
% 298.70/296.88  b000000011001000010000000010(bitIndex15).
% 298.70/296.88  -b000000011001000010000000010(bitIndex14).
% 298.70/296.88  -b000000011001000010000000010(bitIndex13).
% 298.70/296.89  -b000000011001000010000000010(bitIndex12).
% 298.70/296.89  -b000000011001000010000000010(bitIndex11).
% 298.70/296.89  b000000011001000010000000010(bitIndex10).
% 298.70/296.89  -b000000011001000010000000010(bitIndex9).
% 298.70/296.89  -b000000011001000010000000010(bitIndex8).
% 298.70/296.89  -b000000011001000010000000010(bitIndex7).
% 298.70/296.89  -b000000011001000010000000010(bitIndex6).
% 298.70/296.89  -b000000011001000010000000010(bitIndex5).
% 298.70/296.89  -b000000011001000010000000010(bitIndex4).
% 298.70/296.89  -b000000011001000010000000010(bitIndex3).
% 298.70/296.89  -b000000011001000010000000010(bitIndex2).
% 298.70/296.89  b000000011001000010000000010(bitIndex1).
% 298.70/296.89  -b000000011001000010000000010(bitIndex0).
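%----Note: v1462 follows the same gated-comparator pattern but tests the full
%----27-bit v1330 directly against the named constants, with no intermediate
%----slice vector. Reusing const_bits from the sketch above:
%      def v1468(v1330_bits):
%          return v1330_bits == const_bits('b000000011101000010000000010')
%      def v1467(v1330_bits):
%          return v1330_bits == const_bits('b000000011001000010000000010')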
% 298.70/296.89  all VarCurr (-v1359(VarCurr)-> (v1454(VarCurr)<->v1458(VarCurr))).
% 298.70/296.89  all VarCurr (v1359(VarCurr)-> (v1454(VarCurr)<->$F)).
% 298.70/296.89  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1458(VarCurr)<->$F)).
% 298.70/296.89  all VarCurr (v1363(VarCurr)-> (v1458(VarCurr)<->v1460(VarCurr))).
% 298.70/296.89  all VarCurr (v1361(VarCurr)-> (v1458(VarCurr)<->v1459(VarCurr))).
% 298.70/296.89  all VarCurr (v1460(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$T)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.70/296.89  -b000000011101000010000000001(bitIndex26).
% 298.70/296.89  -b000000011101000010000000001(bitIndex25).
% 298.70/296.89  -b000000011101000010000000001(bitIndex24).
% 298.70/296.89  -b000000011101000010000000001(bitIndex23).
% 298.70/296.89  -b000000011101000010000000001(bitIndex22).
% 298.70/296.89  -b000000011101000010000000001(bitIndex21).
% 298.70/296.89  -b000000011101000010000000001(bitIndex20).
% 298.70/296.89  b000000011101000010000000001(bitIndex19).
% 298.70/296.89  b000000011101000010000000001(bitIndex18).
% 298.70/296.89  b000000011101000010000000001(bitIndex17).
% 298.70/296.89  -b000000011101000010000000001(bitIndex16).
% 298.70/296.89  b000000011101000010000000001(bitIndex15).
% 298.70/296.89  -b000000011101000010000000001(bitIndex14).
% 298.70/296.89  -b000000011101000010000000001(bitIndex13).
% 298.70/296.89  -b000000011101000010000000001(bitIndex12).
% 298.70/296.89  -b000000011101000010000000001(bitIndex11).
% 298.70/296.89  b000000011101000010000000001(bitIndex10).
% 298.70/296.89  -b000000011101000010000000001(bitIndex9).
% 298.70/296.89  -b000000011101000010000000001(bitIndex8).
% 298.70/296.89  -b000000011101000010000000001(bitIndex7).
% 298.70/296.89  -b000000011101000010000000001(bitIndex6).
% 298.70/296.89  -b000000011101000010000000001(bitIndex5).
% 298.70/296.89  -b000000011101000010000000001(bitIndex4).
% 298.70/296.89  -b000000011101000010000000001(bitIndex3).
% 298.70/296.89  -b000000011101000010000000001(bitIndex2).
% 298.70/296.89  -b000000011101000010000000001(bitIndex1).
% 298.70/296.89  b000000011101000010000000001(bitIndex0).
% 298.70/296.89  all VarCurr (v1459(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$T)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.70/296.89  -b000000011001000010000000001(bitIndex26).
% 298.70/296.89  -b000000011001000010000000001(bitIndex25).
% 298.70/296.89  -b000000011001000010000000001(bitIndex24).
% 298.70/296.89  -b000000011001000010000000001(bitIndex23).
% 298.70/296.89  -b000000011001000010000000001(bitIndex22).
% 298.70/296.90  -b000000011001000010000000001(bitIndex21).
% 298.70/296.90  -b000000011001000010000000001(bitIndex20).
% 298.70/296.90  b000000011001000010000000001(bitIndex19).
% 298.70/296.90  b000000011001000010000000001(bitIndex18).
% 298.70/296.90  -b000000011001000010000000001(bitIndex17).
% 298.70/296.90  -b000000011001000010000000001(bitIndex16).
% 298.70/296.90  b000000011001000010000000001(bitIndex15).
% 298.70/296.90  -b000000011001000010000000001(bitIndex14).
% 298.70/296.90  -b000000011001000010000000001(bitIndex13).
% 298.70/296.90  -b000000011001000010000000001(bitIndex12).
% 298.70/296.90  -b000000011001000010000000001(bitIndex11).
% 298.70/296.90  b000000011001000010000000001(bitIndex10).
% 298.70/296.90  -b000000011001000010000000001(bitIndex9).
% 298.70/296.90  -b000000011001000010000000001(bitIndex8).
% 298.70/296.90  -b000000011001000010000000001(bitIndex7).
% 298.70/296.90  -b000000011001000010000000001(bitIndex6).
% 298.70/296.90  -b000000011001000010000000001(bitIndex5).
% 298.70/296.90  -b000000011001000010000000001(bitIndex4).
% 298.70/296.90  -b000000011001000010000000001(bitIndex3).
% 298.70/296.90  -b000000011001000010000000001(bitIndex2).
% 298.70/296.90  -b000000011001000010000000001(bitIndex1).
% 298.70/296.90  b000000011001000010000000001(bitIndex0).
% 298.70/296.90  all VarCurr (-v1359(VarCurr)-> (v1446(VarCurr)<->v1450(VarCurr))).
% 298.70/296.90  all VarCurr (v1359(VarCurr)-> (v1446(VarCurr)<->$F)).
% 298.70/296.90  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1450(VarCurr)<->$F)).
% 298.70/296.90  all VarCurr (v1363(VarCurr)-> (v1450(VarCurr)<->v1452(VarCurr))).
% 298.70/296.90  all VarCurr (v1361(VarCurr)-> (v1450(VarCurr)<->v1451(VarCurr))).
% 298.70/296.90  all VarCurr (v1452(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$T)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.70/296.90  -b000000011101000010000000000(bitIndex26).
% 298.70/296.90  -b000000011101000010000000000(bitIndex25).
% 298.70/296.90  -b000000011101000010000000000(bitIndex24).
% 298.70/296.90  -b000000011101000010000000000(bitIndex23).
% 298.70/296.90  -b000000011101000010000000000(bitIndex22).
% 298.70/296.90  -b000000011101000010000000000(bitIndex21).
% 298.70/296.90  -b000000011101000010000000000(bitIndex20).
% 298.70/296.90  b000000011101000010000000000(bitIndex19).
% 298.70/296.90  b000000011101000010000000000(bitIndex18).
% 298.70/296.90  b000000011101000010000000000(bitIndex17).
% 298.70/296.90  -b000000011101000010000000000(bitIndex16).
% 298.70/296.90  b000000011101000010000000000(bitIndex15).
% 298.70/296.90  -b000000011101000010000000000(bitIndex14).
% 298.70/296.90  -b000000011101000010000000000(bitIndex13).
% 298.70/296.90  -b000000011101000010000000000(bitIndex12).
% 298.70/296.90  -b000000011101000010000000000(bitIndex11).
% 298.70/296.90  b000000011101000010000000000(bitIndex10).
% 298.70/296.90  -b000000011101000010000000000(bitIndex9).
% 298.70/296.90  -b000000011101000010000000000(bitIndex8).
% 298.70/296.90  -b000000011101000010000000000(bitIndex7).
% 298.70/296.90  -b000000011101000010000000000(bitIndex6).
% 298.70/296.90  -b000000011101000010000000000(bitIndex5).
% 298.70/296.90  -b000000011101000010000000000(bitIndex4).
% 298.70/296.90  -b000000011101000010000000000(bitIndex3).
% 298.70/296.90  -b000000011101000010000000000(bitIndex2).
% 298.70/296.90  -b000000011101000010000000000(bitIndex1).
% 298.70/296.90  -b000000011101000010000000000(bitIndex0).
% 298.70/296.90  all VarCurr (v1451(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$T)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.70/296.91  -b000000011001000010000000000(bitIndex26).
% 298.70/296.91  -b000000011001000010000000000(bitIndex25).
% 298.70/296.91  -b000000011001000010000000000(bitIndex24).
% 298.70/296.91  -b000000011001000010000000000(bitIndex23).
% 298.70/296.91  -b000000011001000010000000000(bitIndex22).
% 298.70/296.91  -b000000011001000010000000000(bitIndex21).
% 298.70/296.91  -b000000011001000010000000000(bitIndex20).
% 298.70/296.91  b000000011001000010000000000(bitIndex19).
% 298.70/296.91  b000000011001000010000000000(bitIndex18).
% 298.70/296.91  -b000000011001000010000000000(bitIndex17).
% 298.70/296.91  -b000000011001000010000000000(bitIndex16).
% 298.70/296.91  b000000011001000010000000000(bitIndex15).
% 298.70/296.91  -b000000011001000010000000000(bitIndex14).
% 298.70/296.91  -b000000011001000010000000000(bitIndex13).
% 298.70/296.91  -b000000011001000010000000000(bitIndex12).
% 298.70/296.91  -b000000011001000010000000000(bitIndex11).
% 298.70/296.91  b000000011001000010000000000(bitIndex10).
% 298.70/296.91  -b000000011001000010000000000(bitIndex9).
% 298.70/296.91  -b000000011001000010000000000(bitIndex8).
% 298.70/296.91  -b000000011001000010000000000(bitIndex7).
% 298.70/296.91  -b000000011001000010000000000(bitIndex6).
% 298.70/296.91  -b000000011001000010000000000(bitIndex5).
% 298.70/296.91  -b000000011001000010000000000(bitIndex4).
% 298.70/296.91  -b000000011001000010000000000(bitIndex3).
% 298.70/296.91  -b000000011001000010000000000(bitIndex2).
% 298.70/296.91  -b000000011001000010000000000(bitIndex1).
% 298.70/296.91  -b000000011001000010000000000(bitIndex0).
% 298.70/296.91  all VarCurr (-v1359(VarCurr)-> (v1438(VarCurr)<->v1442(VarCurr))).
% 298.70/296.91  all VarCurr (v1359(VarCurr)-> (v1438(VarCurr)<->$F)).
% 298.70/296.91  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1442(VarCurr)<->$F)).
% 298.70/296.91  all VarCurr (v1363(VarCurr)-> (v1442(VarCurr)<->v1444(VarCurr))).
% 298.70/296.91  all VarCurr (v1361(VarCurr)-> (v1442(VarCurr)<->v1443(VarCurr))).
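[Editor's note, not part of the Otter output: taken together, the five formulas above form a gated case selector. v1359 forces v1438 low; otherwise v1438 follows v1442, which tracks v1443 when v1361 holds, v1444 when v1363 holds, and is false when neither holds. A minimal sketch of that reading in Python, assuming — as the repeated pattern suggests but the axioms do not state — that v1361 and v1363 are mutually exclusive (if both held, the axioms would simply force v1443 and v1444 to agree); all names mirror the log:

def v1438(v1359: bool, v1361: bool, v1363: bool,
          v1443: bool, v1444: bool) -> bool:
    """Gated two-way selector: v1359 forces the output low."""
    if v1359:
        return False                 # v1359 -> (v1438 <-> $F)
    if v1361:
        return v1443                 # v1361 -> (v1442 <-> v1443)
    if v1363:
        return v1444                 # v1363 -> (v1442 <-> v1444)
    return False                     # -v1361 & -v1363 -> (v1442 <-> $F)

The same selector shape recurs below for v1430, v1422, v1414, v1406, v1398, v1390, v1382, v1374, v1366, and finally v1296, each time with a different pair of comparison flags.]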
% 298.70/296.91  all VarCurr (v1444(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$T)& (v1330(VarCurr,bitIndex1)<->$T)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.70/296.91  -b000000011101000001000000110(bitIndex26).
% 298.70/296.91  -b000000011101000001000000110(bitIndex25).
% 298.70/296.91  -b000000011101000001000000110(bitIndex24).
% 298.70/296.91  -b000000011101000001000000110(bitIndex23).
% 298.70/296.91  -b000000011101000001000000110(bitIndex22).
% 298.70/296.91  -b000000011101000001000000110(bitIndex21).
% 298.70/296.91  -b000000011101000001000000110(bitIndex20).
% 298.70/296.91  b000000011101000001000000110(bitIndex19).
% 298.70/296.91  b000000011101000001000000110(bitIndex18).
% 298.70/296.91  b000000011101000001000000110(bitIndex17).
% 298.70/296.91  -b000000011101000001000000110(bitIndex16).
% 298.70/296.91  b000000011101000001000000110(bitIndex15).
% 298.70/296.91  -b000000011101000001000000110(bitIndex14).
% 298.70/296.91  -b000000011101000001000000110(bitIndex13).
% 298.70/296.91  -b000000011101000001000000110(bitIndex12).
% 298.70/296.91  -b000000011101000001000000110(bitIndex11).
% 298.70/296.91  -b000000011101000001000000110(bitIndex10).
% 298.70/296.91  b000000011101000001000000110(bitIndex9).
% 298.70/296.91  -b000000011101000001000000110(bitIndex8).
% 298.70/296.91  -b000000011101000001000000110(bitIndex7).
% 298.70/296.91  -b000000011101000001000000110(bitIndex6).
% 298.70/296.91  -b000000011101000001000000110(bitIndex5).
% 298.70/296.91  -b000000011101000001000000110(bitIndex4).
% 298.70/296.91  -b000000011101000001000000110(bitIndex3).
% 298.70/296.91  b000000011101000001000000110(bitIndex2).
% 298.70/296.91  b000000011101000001000000110(bitIndex1).
% 298.70/296.91  -b000000011101000001000000110(bitIndex0).
% 298.70/296.91  all VarCurr (v1443(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$T)& (v1330(VarCurr,bitIndex1)<->$T)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.70/296.92  -b000000011001000001000000110(bitIndex26).
% 298.70/296.92  -b000000011001000001000000110(bitIndex25).
% 298.70/296.92  -b000000011001000001000000110(bitIndex24).
% 298.70/296.92  -b000000011001000001000000110(bitIndex23).
% 298.70/296.92  -b000000011001000001000000110(bitIndex22).
% 298.70/296.92  -b000000011001000001000000110(bitIndex21).
% 298.70/296.92  -b000000011001000001000000110(bitIndex20).
% 298.70/296.92  b000000011001000001000000110(bitIndex19).
% 298.70/296.92  b000000011001000001000000110(bitIndex18).
% 298.70/296.92  -b000000011001000001000000110(bitIndex17).
% 298.70/296.92  -b000000011001000001000000110(bitIndex16).
% 298.70/296.92  b000000011001000001000000110(bitIndex15).
% 298.70/296.92  -b000000011001000001000000110(bitIndex14).
% 298.70/296.92  -b000000011001000001000000110(bitIndex13).
% 298.70/296.92  -b000000011001000001000000110(bitIndex12).
% 298.70/296.92  -b000000011001000001000000110(bitIndex11).
% 298.70/296.92  -b000000011001000001000000110(bitIndex10).
% 298.70/296.92  b000000011001000001000000110(bitIndex9).
% 298.70/296.92  -b000000011001000001000000110(bitIndex8).
% 298.70/296.92  -b000000011001000001000000110(bitIndex7).
% 298.70/296.92  -b000000011001000001000000110(bitIndex6).
% 298.70/296.92  -b000000011001000001000000110(bitIndex5).
% 298.70/296.92  -b000000011001000001000000110(bitIndex4).
% 298.70/296.92  -b000000011001000001000000110(bitIndex3).
% 298.70/296.92  b000000011001000001000000110(bitIndex2).
% 298.70/296.92  b000000011001000001000000110(bitIndex1).
% 298.70/296.92  -b000000011001000001000000110(bitIndex0).
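[Editor's note, not part of the Otter output: each one-line biconditional in this stretch (v1444 and v1443 above, and the analogous flags below) asserts that a flag holds exactly when the 27-bit vector v1330 equals one fixed constant; the constant block printed after each formula restates the same pattern as unit clauses. A short illustrative sketch — the helper and its argument layout are assumptions, not the prover's API:

def equals_constant(v1330: list[bool], name: str) -> bool:
    """True iff the bit vector matches the constant named by `name`."""
    bits = name.lstrip("b")
    assert len(v1330) == len(bits)
    # v1330[i] holds the value at bitIndex i, so compare it against the
    # character (len-1-i) positions from the left of the constant's name.
    return all(v1330[i] == (bits[len(bits) - 1 - i] == "1")
               for i in range(len(bits)))

# e.g. v1443 above holds exactly when
# equals_constant(v1330_bits, "b000000011001000001000000110") is True.
]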
% 298.70/296.92  all VarCurr (-v1359(VarCurr)-> (v1430(VarCurr)<->v1434(VarCurr))).
% 298.70/296.92  all VarCurr (v1359(VarCurr)-> (v1430(VarCurr)<->$F)).
% 298.70/296.92  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1434(VarCurr)<->$F)).
% 298.70/296.92  all VarCurr (v1363(VarCurr)-> (v1434(VarCurr)<->v1436(VarCurr))).
% 298.70/296.92  all VarCurr (v1361(VarCurr)-> (v1434(VarCurr)<->v1435(VarCurr))).
% 298.70/296.92  all VarCurr (v1436(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$T)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.70/296.92  -b000000011101000001000000101(bitIndex26).
% 298.70/296.92  -b000000011101000001000000101(bitIndex25).
% 298.70/296.92  -b000000011101000001000000101(bitIndex24).
% 298.70/296.92  -b000000011101000001000000101(bitIndex23).
% 298.70/296.92  -b000000011101000001000000101(bitIndex22).
% 298.70/296.92  -b000000011101000001000000101(bitIndex21).
% 298.70/296.92  -b000000011101000001000000101(bitIndex20).
% 298.70/296.92  b000000011101000001000000101(bitIndex19).
% 298.70/296.92  b000000011101000001000000101(bitIndex18).
% 298.70/296.92  b000000011101000001000000101(bitIndex17).
% 298.70/296.92  -b000000011101000001000000101(bitIndex16).
% 298.70/296.92  b000000011101000001000000101(bitIndex15).
% 298.70/296.92  -b000000011101000001000000101(bitIndex14).
% 298.70/296.92  -b000000011101000001000000101(bitIndex13).
% 298.70/296.92  -b000000011101000001000000101(bitIndex12).
% 298.70/296.92  -b000000011101000001000000101(bitIndex11).
% 298.70/296.92  -b000000011101000001000000101(bitIndex10).
% 298.70/296.92  b000000011101000001000000101(bitIndex9).
% 298.70/296.92  -b000000011101000001000000101(bitIndex8).
% 298.70/296.92  -b000000011101000001000000101(bitIndex7).
% 298.70/296.92  -b000000011101000001000000101(bitIndex6).
% 298.70/296.92  -b000000011101000001000000101(bitIndex5).
% 298.70/296.92  -b000000011101000001000000101(bitIndex4).
% 298.70/296.92  -b000000011101000001000000101(bitIndex3).
% 298.70/296.92  b000000011101000001000000101(bitIndex2).
% 298.70/296.93  -b000000011101000001000000101(bitIndex1).
% 298.70/296.93  b000000011101000001000000101(bitIndex0).
% 298.70/296.93  all VarCurr (v1435(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$T)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.70/296.93  -b000000011001000001000000101(bitIndex26).
% 298.70/296.93  -b000000011001000001000000101(bitIndex25).
% 298.70/296.93  -b000000011001000001000000101(bitIndex24).
% 298.70/296.93  -b000000011001000001000000101(bitIndex23).
% 298.70/296.93  -b000000011001000001000000101(bitIndex22).
% 298.70/296.93  -b000000011001000001000000101(bitIndex21).
% 298.70/296.93  -b000000011001000001000000101(bitIndex20).
% 298.70/296.93  b000000011001000001000000101(bitIndex19).
% 298.70/296.93  b000000011001000001000000101(bitIndex18).
% 298.70/296.93  -b000000011001000001000000101(bitIndex17).
% 298.70/296.93  -b000000011001000001000000101(bitIndex16).
% 298.70/296.93  b000000011001000001000000101(bitIndex15).
% 298.70/296.93  -b000000011001000001000000101(bitIndex14).
% 298.70/296.93  -b000000011001000001000000101(bitIndex13).
% 298.70/296.93  -b000000011001000001000000101(bitIndex12).
% 298.70/296.93  -b000000011001000001000000101(bitIndex11).
% 298.70/296.93  -b000000011001000001000000101(bitIndex10).
% 298.70/296.93  b000000011001000001000000101(bitIndex9).
% 298.70/296.93  -b000000011001000001000000101(bitIndex8).
% 298.70/296.93  -b000000011001000001000000101(bitIndex7).
% 298.70/296.93  -b000000011001000001000000101(bitIndex6).
% 298.70/296.93  -b000000011001000001000000101(bitIndex5).
% 298.70/296.93  -b000000011001000001000000101(bitIndex4).
% 298.70/296.93  -b000000011001000001000000101(bitIndex3).
% 298.70/296.93  b000000011001000001000000101(bitIndex2).
% 298.70/296.93  -b000000011001000001000000101(bitIndex1).
% 298.70/296.93  b000000011001000001000000101(bitIndex0).
% 298.70/296.93  all VarCurr (-v1359(VarCurr)-> (v1422(VarCurr)<->v1426(VarCurr))).
% 298.70/296.93  all VarCurr (v1359(VarCurr)-> (v1422(VarCurr)<->$F)).
% 298.70/296.93  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1426(VarCurr)<->$F)).
% 298.70/296.93  all VarCurr (v1363(VarCurr)-> (v1426(VarCurr)<->v1428(VarCurr))).
% 298.70/296.93  all VarCurr (v1361(VarCurr)-> (v1426(VarCurr)<->v1427(VarCurr))).
% 298.70/296.93  all VarCurr (v1428(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$T)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.70/296.93  -b000000011101000001000000011(bitIndex26).
% 298.70/296.93  -b000000011101000001000000011(bitIndex25).
% 298.70/296.93  -b000000011101000001000000011(bitIndex24).
% 298.70/296.93  -b000000011101000001000000011(bitIndex23).
% 298.70/296.93  -b000000011101000001000000011(bitIndex22).
% 298.70/296.93  -b000000011101000001000000011(bitIndex21).
% 298.70/296.93  -b000000011101000001000000011(bitIndex20).
% 298.70/296.93  b000000011101000001000000011(bitIndex19).
% 298.70/296.93  b000000011101000001000000011(bitIndex18).
% 298.70/296.93  b000000011101000001000000011(bitIndex17).
% 298.70/296.93  -b000000011101000001000000011(bitIndex16).
% 298.70/296.93  b000000011101000001000000011(bitIndex15).
% 298.70/296.93  -b000000011101000001000000011(bitIndex14).
% 298.70/296.93  -b000000011101000001000000011(bitIndex13).
% 298.70/296.93  -b000000011101000001000000011(bitIndex12).
% 298.70/296.93  -b000000011101000001000000011(bitIndex11).
% 298.75/296.94  -b000000011101000001000000011(bitIndex10).
% 298.75/296.94  b000000011101000001000000011(bitIndex9).
% 298.75/296.94  -b000000011101000001000000011(bitIndex8).
% 298.75/296.94  -b000000011101000001000000011(bitIndex7).
% 298.75/296.94  -b000000011101000001000000011(bitIndex6).
% 298.75/296.94  -b000000011101000001000000011(bitIndex5).
% 298.75/296.94  -b000000011101000001000000011(bitIndex4).
% 298.75/296.94  -b000000011101000001000000011(bitIndex3).
% 298.75/296.94  -b000000011101000001000000011(bitIndex2).
% 298.75/296.94  b000000011101000001000000011(bitIndex1).
% 298.75/296.94  b000000011101000001000000011(bitIndex0).
% 298.75/296.94  all VarCurr (v1427(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$T)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.75/296.94  -b000000011001000001000000011(bitIndex26).
% 298.75/296.94  -b000000011001000001000000011(bitIndex25).
% 298.75/296.94  -b000000011001000001000000011(bitIndex24).
% 298.75/296.94  -b000000011001000001000000011(bitIndex23).
% 298.75/296.94  -b000000011001000001000000011(bitIndex22).
% 298.75/296.94  -b000000011001000001000000011(bitIndex21).
% 298.75/296.94  -b000000011001000001000000011(bitIndex20).
% 298.75/296.94  b000000011001000001000000011(bitIndex19).
% 298.75/296.94  b000000011001000001000000011(bitIndex18).
% 298.75/296.94  -b000000011001000001000000011(bitIndex17).
% 298.75/296.94  -b000000011001000001000000011(bitIndex16).
% 298.75/296.94  b000000011001000001000000011(bitIndex15).
% 298.75/296.94  -b000000011001000001000000011(bitIndex14).
% 298.75/296.94  -b000000011001000001000000011(bitIndex13).
% 298.75/296.94  -b000000011001000001000000011(bitIndex12).
% 298.75/296.94  -b000000011001000001000000011(bitIndex11).
% 298.75/296.94  -b000000011001000001000000011(bitIndex10).
% 298.75/296.94  b000000011001000001000000011(bitIndex9).
% 298.75/296.94  -b000000011001000001000000011(bitIndex8).
% 298.75/296.94  -b000000011001000001000000011(bitIndex7).
% 298.75/296.94  -b000000011001000001000000011(bitIndex6).
% 298.75/296.94  -b000000011001000001000000011(bitIndex5).
% 298.75/296.94  -b000000011001000001000000011(bitIndex4).
% 298.75/296.94  -b000000011001000001000000011(bitIndex3).
% 298.75/296.94  -b000000011001000001000000011(bitIndex2).
% 298.75/296.94  b000000011001000001000000011(bitIndex1).
% 298.75/296.94  b000000011001000001000000011(bitIndex0).
% 298.75/296.94  all VarCurr (-v1359(VarCurr)-> (v1414(VarCurr)<->v1418(VarCurr))).
% 298.75/296.94  all VarCurr (v1359(VarCurr)-> (v1414(VarCurr)<->$F)).
% 298.75/296.94  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1418(VarCurr)<->$F)).
% 298.75/296.94  all VarCurr (v1363(VarCurr)-> (v1418(VarCurr)<->v1420(VarCurr))).
% 298.75/296.94  all VarCurr (v1361(VarCurr)-> (v1418(VarCurr)<->v1419(VarCurr))).
% 298.75/296.94  all VarCurr (v1420(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$T)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.75/296.94  -b000000011101000001000000100(bitIndex26).
% 298.75/296.94  -b000000011101000001000000100(bitIndex25).
% 298.75/296.94  -b000000011101000001000000100(bitIndex24).
% 298.75/296.94  -b000000011101000001000000100(bitIndex23).
% 298.75/296.94  -b000000011101000001000000100(bitIndex22).
% 298.75/296.94  -b000000011101000001000000100(bitIndex21).
% 298.75/296.94  -b000000011101000001000000100(bitIndex20).
% 298.75/296.95  b000000011101000001000000100(bitIndex19).
% 298.75/296.95  b000000011101000001000000100(bitIndex18).
% 298.75/296.95  b000000011101000001000000100(bitIndex17).
% 298.75/296.95  -b000000011101000001000000100(bitIndex16).
% 298.75/296.95  b000000011101000001000000100(bitIndex15).
% 298.75/296.95  -b000000011101000001000000100(bitIndex14).
% 298.75/296.95  -b000000011101000001000000100(bitIndex13).
% 298.75/296.95  -b000000011101000001000000100(bitIndex12).
% 298.75/296.95  -b000000011101000001000000100(bitIndex11).
% 298.75/296.95  -b000000011101000001000000100(bitIndex10).
% 298.75/296.95  b000000011101000001000000100(bitIndex9).
% 298.75/296.95  -b000000011101000001000000100(bitIndex8).
% 298.75/296.95  -b000000011101000001000000100(bitIndex7).
% 298.75/296.95  -b000000011101000001000000100(bitIndex6).
% 298.75/296.95  -b000000011101000001000000100(bitIndex5).
% 298.75/296.95  -b000000011101000001000000100(bitIndex4).
% 298.75/296.95  -b000000011101000001000000100(bitIndex3).
% 298.75/296.95  b000000011101000001000000100(bitIndex2).
% 298.75/296.95  -b000000011101000001000000100(bitIndex1).
% 298.75/296.95  -b000000011101000001000000100(bitIndex0).
% 298.75/296.95  all VarCurr (v1419(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$T)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.75/296.95  -b000000011001000001000000100(bitIndex26).
% 298.75/296.95  -b000000011001000001000000100(bitIndex25).
% 298.75/296.95  -b000000011001000001000000100(bitIndex24).
% 298.75/296.95  -b000000011001000001000000100(bitIndex23).
% 298.75/296.95  -b000000011001000001000000100(bitIndex22).
% 298.75/296.95  -b000000011001000001000000100(bitIndex21).
% 298.75/296.95  -b000000011001000001000000100(bitIndex20).
% 298.75/296.95  b000000011001000001000000100(bitIndex19).
% 298.75/296.95  b000000011001000001000000100(bitIndex18).
% 298.75/296.95  -b000000011001000001000000100(bitIndex17).
% 298.75/296.95  -b000000011001000001000000100(bitIndex16).
% 298.75/296.95  b000000011001000001000000100(bitIndex15).
% 298.75/296.95  -b000000011001000001000000100(bitIndex14).
% 298.75/296.95  -b000000011001000001000000100(bitIndex13).
% 298.75/296.95  -b000000011001000001000000100(bitIndex12).
% 298.75/296.95  -b000000011001000001000000100(bitIndex11).
% 298.75/296.95  -b000000011001000001000000100(bitIndex10).
% 298.75/296.95  b000000011001000001000000100(bitIndex9).
% 298.75/296.95  -b000000011001000001000000100(bitIndex8).
% 298.75/296.95  -b000000011001000001000000100(bitIndex7).
% 298.75/296.95  -b000000011001000001000000100(bitIndex6).
% 298.75/296.95  -b000000011001000001000000100(bitIndex5).
% 298.75/296.95  -b000000011001000001000000100(bitIndex4).
% 298.75/296.95  -b000000011001000001000000100(bitIndex3).
% 298.75/296.95  b000000011001000001000000100(bitIndex2).
% 298.75/296.95  -b000000011001000001000000100(bitIndex1).
% 298.75/296.95  -b000000011001000001000000100(bitIndex0).
% 298.75/296.95  all VarCurr (-v1359(VarCurr)-> (v1406(VarCurr)<->v1410(VarCurr))).
% 298.75/296.95  all VarCurr (v1359(VarCurr)-> (v1406(VarCurr)<->$F)).
% 298.75/296.95  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1410(VarCurr)<->$F)).
% 298.75/296.95  all VarCurr (v1363(VarCurr)-> (v1410(VarCurr)<->v1412(VarCurr))).
% 298.75/296.95  all VarCurr (v1361(VarCurr)-> (v1410(VarCurr)<->v1411(VarCurr))).
% 298.75/296.95  all VarCurr (v1412(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$T)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.77/296.95  -b000000011101000001000000010(bitIndex26).
% 298.77/296.95  -b000000011101000001000000010(bitIndex25).
% 298.77/296.95  -b000000011101000001000000010(bitIndex24).
% 298.77/296.95  -b000000011101000001000000010(bitIndex23).
% 298.77/296.95  -b000000011101000001000000010(bitIndex22).
% 298.77/296.95  -b000000011101000001000000010(bitIndex21).
% 298.77/296.95  -b000000011101000001000000010(bitIndex20).
% 298.77/296.95  b000000011101000001000000010(bitIndex19).
% 298.77/296.95  b000000011101000001000000010(bitIndex18).
% 298.77/296.95  b000000011101000001000000010(bitIndex17).
% 298.77/296.95  -b000000011101000001000000010(bitIndex16).
% 298.77/296.95  b000000011101000001000000010(bitIndex15).
% 298.77/296.95  -b000000011101000001000000010(bitIndex14).
% 298.77/296.95  -b000000011101000001000000010(bitIndex13).
% 298.77/296.95  -b000000011101000001000000010(bitIndex12).
% 298.77/296.95  -b000000011101000001000000010(bitIndex11).
% 298.77/296.95  -b000000011101000001000000010(bitIndex10).
% 298.77/296.95  b000000011101000001000000010(bitIndex9).
% 298.77/296.95  -b000000011101000001000000010(bitIndex8).
% 298.77/296.95  -b000000011101000001000000010(bitIndex7).
% 298.77/296.95  -b000000011101000001000000010(bitIndex6).
% 298.77/296.95  -b000000011101000001000000010(bitIndex5).
% 298.77/296.95  -b000000011101000001000000010(bitIndex4).
% 298.77/296.95  -b000000011101000001000000010(bitIndex3).
% 298.77/296.95  -b000000011101000001000000010(bitIndex2).
% 298.77/296.95  b000000011101000001000000010(bitIndex1).
% 298.77/296.95  -b000000011101000001000000010(bitIndex0).
% 298.77/296.95  all VarCurr (v1411(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$T)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.77/296.95  -b000000011001000001000000010(bitIndex26).
% 298.77/296.95  -b000000011001000001000000010(bitIndex25).
% 298.77/296.95  -b000000011001000001000000010(bitIndex24).
% 298.77/296.95  -b000000011001000001000000010(bitIndex23).
% 298.77/296.95  -b000000011001000001000000010(bitIndex22).
% 298.77/296.95  -b000000011001000001000000010(bitIndex21).
% 298.77/296.95  -b000000011001000001000000010(bitIndex20).
% 298.77/296.95  b000000011001000001000000010(bitIndex19).
% 298.77/296.95  b000000011001000001000000010(bitIndex18).
% 298.77/296.95  -b000000011001000001000000010(bitIndex17).
% 298.77/296.95  -b000000011001000001000000010(bitIndex16).
% 298.77/296.95  b000000011001000001000000010(bitIndex15).
% 298.77/296.95  -b000000011001000001000000010(bitIndex14).
% 298.77/296.95  -b000000011001000001000000010(bitIndex13).
% 298.77/296.95  -b000000011001000001000000010(bitIndex12).
% 298.77/296.95  -b000000011001000001000000010(bitIndex11).
% 298.77/296.95  -b000000011001000001000000010(bitIndex10).
% 298.77/296.95  b000000011001000001000000010(bitIndex9).
% 298.77/296.95  -b000000011001000001000000010(bitIndex8).
% 298.77/296.95  -b000000011001000001000000010(bitIndex7).
% 298.77/296.96  -b000000011001000001000000010(bitIndex6).
% 298.77/296.96  -b000000011001000001000000010(bitIndex5).
% 298.77/296.96  -b000000011001000001000000010(bitIndex4).
% 298.77/296.96  -b000000011001000001000000010(bitIndex3).
% 298.77/296.96  -b000000011001000001000000010(bitIndex2).
% 298.77/296.96  b000000011001000001000000010(bitIndex1).
% 298.77/296.96  -b000000011001000001000000010(bitIndex0).
% 298.77/296.96  all VarCurr (-v1359(VarCurr)-> (v1398(VarCurr)<->v1402(VarCurr))).
% 298.77/296.96  all VarCurr (v1359(VarCurr)-> (v1398(VarCurr)<->$F)).
% 298.77/296.96  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1402(VarCurr)<->$F)).
% 298.77/296.96  all VarCurr (v1363(VarCurr)-> (v1402(VarCurr)<->v1404(VarCurr))).
% 298.77/296.96  all VarCurr (v1361(VarCurr)-> (v1402(VarCurr)<->v1403(VarCurr))).
% 298.77/296.96  all VarCurr (v1404(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.77/296.96  -b000000011101000001000000001(bitIndex26).
% 298.77/296.96  -b000000011101000001000000001(bitIndex25).
% 298.77/296.96  -b000000011101000001000000001(bitIndex24).
% 298.77/296.96  -b000000011101000001000000001(bitIndex23).
% 298.77/296.96  -b000000011101000001000000001(bitIndex22).
% 298.77/296.96  -b000000011101000001000000001(bitIndex21).
% 298.77/296.96  -b000000011101000001000000001(bitIndex20).
% 298.77/296.96  b000000011101000001000000001(bitIndex19).
% 298.77/296.96  b000000011101000001000000001(bitIndex18).
% 298.77/296.96  b000000011101000001000000001(bitIndex17).
% 298.77/296.96  -b000000011101000001000000001(bitIndex16).
% 298.77/296.96  b000000011101000001000000001(bitIndex15).
% 298.77/296.96  -b000000011101000001000000001(bitIndex14).
% 298.77/296.96  -b000000011101000001000000001(bitIndex13).
% 298.77/296.96  -b000000011101000001000000001(bitIndex12).
% 298.77/296.96  -b000000011101000001000000001(bitIndex11).
% 298.77/296.96  -b000000011101000001000000001(bitIndex10).
% 298.77/296.96  b000000011101000001000000001(bitIndex9).
% 298.77/296.96  -b000000011101000001000000001(bitIndex8).
% 298.77/296.96  -b000000011101000001000000001(bitIndex7).
% 298.77/296.96  -b000000011101000001000000001(bitIndex6).
% 298.77/296.96  -b000000011101000001000000001(bitIndex5).
% 298.77/296.96  -b000000011101000001000000001(bitIndex4).
% 298.77/296.96  -b000000011101000001000000001(bitIndex3).
% 298.77/296.96  -b000000011101000001000000001(bitIndex2).
% 298.77/296.96  -b000000011101000001000000001(bitIndex1).
% 298.77/296.96  b000000011101000001000000001(bitIndex0).
% 298.77/296.96  all VarCurr (v1403(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.77/296.96  -b000000011001000001000000001(bitIndex26).
% 298.77/296.96  -b000000011001000001000000001(bitIndex25).
% 298.77/296.96  -b000000011001000001000000001(bitIndex24).
% 298.77/296.96  -b000000011001000001000000001(bitIndex23).
% 298.77/296.96  -b000000011001000001000000001(bitIndex22).
% 298.77/296.96  -b000000011001000001000000001(bitIndex21).
% 298.77/296.96  -b000000011001000001000000001(bitIndex20).
% 298.77/296.96  b000000011001000001000000001(bitIndex19).
% 298.77/296.96  b000000011001000001000000001(bitIndex18).
% 298.77/296.96  -b000000011001000001000000001(bitIndex17).
% 298.77/296.96  -b000000011001000001000000001(bitIndex16).
% 298.77/296.96  b000000011001000001000000001(bitIndex15).
% 298.77/296.96  -b000000011001000001000000001(bitIndex14).
% 298.77/296.96  -b000000011001000001000000001(bitIndex13).
% 298.77/296.96  -b000000011001000001000000001(bitIndex12).
% 298.77/296.96  -b000000011001000001000000001(bitIndex11).
% 298.77/296.96  -b000000011001000001000000001(bitIndex10).
% 298.77/296.96  b000000011001000001000000001(bitIndex9).
% 298.77/296.96  -b000000011001000001000000001(bitIndex8).
% 298.77/296.96  -b000000011001000001000000001(bitIndex7).
% 298.77/296.96  -b000000011001000001000000001(bitIndex6).
% 298.77/296.96  -b000000011001000001000000001(bitIndex5).
% 298.77/296.96  -b000000011001000001000000001(bitIndex4).
% 298.77/296.96  -b000000011001000001000000001(bitIndex3).
% 298.77/296.96  -b000000011001000001000000001(bitIndex2).
% 298.77/296.96  -b000000011001000001000000001(bitIndex1).
% 298.77/296.96  b000000011001000001000000001(bitIndex0).
% 298.77/296.96  all VarCurr (-v1359(VarCurr)-> (v1390(VarCurr)<->v1394(VarCurr))).
% 298.77/296.96  all VarCurr (v1359(VarCurr)-> (v1390(VarCurr)<->$F)).
% 298.77/296.96  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1394(VarCurr)<->$F)).
% 298.77/296.96  all VarCurr (v1363(VarCurr)-> (v1394(VarCurr)<->v1396(VarCurr))).
% 298.77/296.96  all VarCurr (v1361(VarCurr)-> (v1394(VarCurr)<->v1395(VarCurr))).
% 298.77/296.96  all VarCurr (v1396(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.77/296.97  -b000000011101000001000000000(bitIndex26).
% 298.77/296.97  -b000000011101000001000000000(bitIndex25).
% 298.77/296.97  -b000000011101000001000000000(bitIndex24).
% 298.77/296.97  -b000000011101000001000000000(bitIndex23).
% 298.77/296.97  -b000000011101000001000000000(bitIndex22).
% 298.77/296.97  -b000000011101000001000000000(bitIndex21).
% 298.77/296.97  -b000000011101000001000000000(bitIndex20).
% 298.77/296.97  b000000011101000001000000000(bitIndex19).
% 298.77/296.97  b000000011101000001000000000(bitIndex18).
% 298.77/296.97  b000000011101000001000000000(bitIndex17).
% 298.77/296.97  -b000000011101000001000000000(bitIndex16).
% 298.77/296.97  b000000011101000001000000000(bitIndex15).
% 298.77/296.97  -b000000011101000001000000000(bitIndex14).
% 298.77/296.97  -b000000011101000001000000000(bitIndex13).
% 298.77/296.97  -b000000011101000001000000000(bitIndex12).
% 298.77/296.97  -b000000011101000001000000000(bitIndex11).
% 298.77/296.97  -b000000011101000001000000000(bitIndex10).
% 298.77/296.97  b000000011101000001000000000(bitIndex9).
% 298.77/296.97  -b000000011101000001000000000(bitIndex8).
% 298.77/296.97  -b000000011101000001000000000(bitIndex7).
% 298.77/296.97  -b000000011101000001000000000(bitIndex6).
% 298.77/296.97  -b000000011101000001000000000(bitIndex5).
% 298.77/296.97  -b000000011101000001000000000(bitIndex4).
% 298.77/296.97  -b000000011101000001000000000(bitIndex3).
% 298.77/296.97  -b000000011101000001000000000(bitIndex2).
% 298.77/296.97  -b000000011101000001000000000(bitIndex1).
% 298.77/296.97  -b000000011101000001000000000(bitIndex0).
% 298.77/296.97  all VarCurr (v1395(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$T)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.77/296.97  -b000000011001000001000000000(bitIndex26).
% 298.77/296.97  -b000000011001000001000000000(bitIndex25).
% 298.77/296.97  -b000000011001000001000000000(bitIndex24).
% 298.77/296.97  -b000000011001000001000000000(bitIndex23).
% 298.77/296.97  -b000000011001000001000000000(bitIndex22).
% 298.77/296.97  -b000000011001000001000000000(bitIndex21).
% 298.77/296.97  -b000000011001000001000000000(bitIndex20).
% 298.77/296.97  b000000011001000001000000000(bitIndex19).
% 298.77/296.97  b000000011001000001000000000(bitIndex18).
% 298.77/296.97  -b000000011001000001000000000(bitIndex17).
% 298.77/296.97  -b000000011001000001000000000(bitIndex16).
% 298.77/296.97  b000000011001000001000000000(bitIndex15).
% 298.77/296.97  -b000000011001000001000000000(bitIndex14).
% 298.77/296.97  -b000000011001000001000000000(bitIndex13).
% 298.77/296.97  -b000000011001000001000000000(bitIndex12).
% 298.77/296.97  -b000000011001000001000000000(bitIndex11).
% 298.77/296.97  -b000000011001000001000000000(bitIndex10).
% 298.77/296.97  b000000011001000001000000000(bitIndex9).
% 298.77/296.97  -b000000011001000001000000000(bitIndex8).
% 298.77/296.97  -b000000011001000001000000000(bitIndex7).
% 298.77/296.97  -b000000011001000001000000000(bitIndex6).
% 298.77/296.97  -b000000011001000001000000000(bitIndex5).
% 298.77/296.97  -b000000011001000001000000000(bitIndex4).
% 298.77/296.97  -b000000011001000001000000000(bitIndex3).
% 298.77/296.97  -b000000011001000001000000000(bitIndex2).
% 298.77/296.97  -b000000011001000001000000000(bitIndex1).
% 298.77/296.98  -b000000011001000001000000000(bitIndex0).
% 298.77/296.98  all VarCurr (-v1359(VarCurr)-> (v1382(VarCurr)<->v1386(VarCurr))).
% 298.77/296.98  all VarCurr (v1359(VarCurr)-> (v1382(VarCurr)<->$F)).
% 298.77/296.98  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1386(VarCurr)<->$F)).
% 298.77/296.98  all VarCurr (v1363(VarCurr)-> (v1386(VarCurr)<->v1388(VarCurr))).
% 298.77/296.98  all VarCurr (v1361(VarCurr)-> (v1386(VarCurr)<->v1387(VarCurr))).
% 298.77/296.98  all VarCurr (v1388(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$T)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.77/296.98  -b000000011101000000000100001(bitIndex26).
% 298.77/296.98  -b000000011101000000000100001(bitIndex25).
% 298.77/296.98  -b000000011101000000000100001(bitIndex24).
% 298.77/296.98  -b000000011101000000000100001(bitIndex23).
% 298.77/296.98  -b000000011101000000000100001(bitIndex22).
% 298.77/296.98  -b000000011101000000000100001(bitIndex21).
% 298.77/296.98  -b000000011101000000000100001(bitIndex20).
% 298.77/296.98  b000000011101000000000100001(bitIndex19).
% 298.77/296.98  b000000011101000000000100001(bitIndex18).
% 298.77/296.98  b000000011101000000000100001(bitIndex17).
% 298.77/296.98  -b000000011101000000000100001(bitIndex16).
% 298.77/296.98  b000000011101000000000100001(bitIndex15).
% 298.77/296.98  -b000000011101000000000100001(bitIndex14).
% 298.77/296.98  -b000000011101000000000100001(bitIndex13).
% 298.77/296.98  -b000000011101000000000100001(bitIndex12).
% 298.77/296.98  -b000000011101000000000100001(bitIndex11).
% 298.77/296.98  -b000000011101000000000100001(bitIndex10).
% 298.77/296.98  -b000000011101000000000100001(bitIndex9).
% 298.77/296.98  -b000000011101000000000100001(bitIndex8).
% 298.77/296.98  -b000000011101000000000100001(bitIndex7).
% 298.77/296.98  -b000000011101000000000100001(bitIndex6).
% 298.77/296.98  b000000011101000000000100001(bitIndex5).
% 298.77/296.98  -b000000011101000000000100001(bitIndex4).
% 298.77/296.98  -b000000011101000000000100001(bitIndex3).
% 298.77/296.98  -b000000011101000000000100001(bitIndex2).
% 298.77/296.98  -b000000011101000000000100001(bitIndex1).
% 298.77/296.98  b000000011101000000000100001(bitIndex0).
% 298.77/296.98  all VarCurr (v1387(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$T)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.77/296.98  -b000000011001000000000100001(bitIndex26).
% 298.77/296.98  -b000000011001000000000100001(bitIndex25).
% 298.77/296.98  -b000000011001000000000100001(bitIndex24).
% 298.77/296.98  -b000000011001000000000100001(bitIndex23).
% 298.77/296.98  -b000000011001000000000100001(bitIndex22).
% 298.77/296.98  -b000000011001000000000100001(bitIndex21).
% 298.77/296.98  -b000000011001000000000100001(bitIndex20).
% 298.77/296.98  b000000011001000000000100001(bitIndex19).
% 298.77/296.98  b000000011001000000000100001(bitIndex18).
% 298.77/296.98  -b000000011001000000000100001(bitIndex17).
% 298.77/296.98  -b000000011001000000000100001(bitIndex16).
% 298.77/296.98  b000000011001000000000100001(bitIndex15).
% 298.77/296.98  -b000000011001000000000100001(bitIndex14).
% 298.77/296.98  -b000000011001000000000100001(bitIndex13).
% 298.77/296.98  -b000000011001000000000100001(bitIndex12).
% 298.77/296.98  -b000000011001000000000100001(bitIndex11).
% 298.77/296.99  -b000000011001000000000100001(bitIndex10).
% 298.77/296.99  -b000000011001000000000100001(bitIndex9).
% 298.77/296.99  -b000000011001000000000100001(bitIndex8).
% 298.77/296.99  -b000000011001000000000100001(bitIndex7).
% 298.77/296.99  -b000000011001000000000100001(bitIndex6).
% 298.77/296.99  b000000011001000000000100001(bitIndex5).
% 298.77/296.99  -b000000011001000000000100001(bitIndex4).
% 298.77/296.99  -b000000011001000000000100001(bitIndex3).
% 298.77/296.99  -b000000011001000000000100001(bitIndex2).
% 298.77/296.99  -b000000011001000000000100001(bitIndex1).
% 298.77/296.99  b000000011001000000000100001(bitIndex0).
% 298.77/296.99  all VarCurr (-v1359(VarCurr)-> (v1374(VarCurr)<->v1378(VarCurr))).
% 298.77/296.99  all VarCurr (v1359(VarCurr)-> (v1374(VarCurr)<->$F)).
% 298.77/296.99  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1378(VarCurr)<->$F)).
% 298.77/296.99  all VarCurr (v1363(VarCurr)-> (v1378(VarCurr)<->v1380(VarCurr))).
% 298.77/296.99  all VarCurr (v1361(VarCurr)-> (v1378(VarCurr)<->v1379(VarCurr))).
% 298.77/296.99  all VarCurr (v1380(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$T)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.77/296.99  -b000000011101000000000100000(bitIndex26).
% 298.77/296.99  -b000000011101000000000100000(bitIndex25).
% 298.77/296.99  -b000000011101000000000100000(bitIndex24).
% 298.77/296.99  -b000000011101000000000100000(bitIndex23).
% 298.77/296.99  -b000000011101000000000100000(bitIndex22).
% 298.77/296.99  -b000000011101000000000100000(bitIndex21).
% 298.77/296.99  -b000000011101000000000100000(bitIndex20).
% 298.77/296.99  b000000011101000000000100000(bitIndex19).
% 298.77/296.99  b000000011101000000000100000(bitIndex18).
% 298.77/296.99  b000000011101000000000100000(bitIndex17).
% 298.77/296.99  -b000000011101000000000100000(bitIndex16).
% 298.77/296.99  b000000011101000000000100000(bitIndex15).
% 298.77/296.99  -b000000011101000000000100000(bitIndex14).
% 298.77/296.99  -b000000011101000000000100000(bitIndex13).
% 298.77/296.99  -b000000011101000000000100000(bitIndex12).
% 298.77/296.99  -b000000011101000000000100000(bitIndex11).
% 298.77/296.99  -b000000011101000000000100000(bitIndex10).
% 298.77/296.99  -b000000011101000000000100000(bitIndex9).
% 298.77/296.99  -b000000011101000000000100000(bitIndex8).
% 298.77/296.99  -b000000011101000000000100000(bitIndex7).
% 298.77/296.99  -b000000011101000000000100000(bitIndex6).
% 298.77/296.99  b000000011101000000000100000(bitIndex5).
% 298.77/296.99  -b000000011101000000000100000(bitIndex4).
% 298.77/296.99  -b000000011101000000000100000(bitIndex3).
% 298.77/296.99  -b000000011101000000000100000(bitIndex2).
% 298.77/296.99  -b000000011101000000000100000(bitIndex1).
% 298.77/296.99  -b000000011101000000000100000(bitIndex0).
% 298.77/296.99  all VarCurr (v1379(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$T)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.77/296.99  -b000000011001000000000100000(bitIndex26).
% 298.77/296.99  -b000000011001000000000100000(bitIndex25).
% 298.77/296.99  -b000000011001000000000100000(bitIndex24).
% 298.77/296.99  -b000000011001000000000100000(bitIndex23).
% 298.77/296.99  -b000000011001000000000100000(bitIndex22).
% 298.77/296.99  -b000000011001000000000100000(bitIndex21).
% 298.77/296.99  -b000000011001000000000100000(bitIndex20).
% 298.77/297.00  b000000011001000000000100000(bitIndex19).
% 298.77/297.00  b000000011001000000000100000(bitIndex18).
% 298.77/297.00  -b000000011001000000000100000(bitIndex17).
% 298.77/297.00  -b000000011001000000000100000(bitIndex16).
% 298.77/297.00  b000000011001000000000100000(bitIndex15).
% 298.77/297.00  -b000000011001000000000100000(bitIndex14).
% 298.77/297.00  -b000000011001000000000100000(bitIndex13).
% 298.77/297.00  -b000000011001000000000100000(bitIndex12).
% 298.77/297.00  -b000000011001000000000100000(bitIndex11).
% 298.77/297.00  -b000000011001000000000100000(bitIndex10).
% 298.77/297.00  -b000000011001000000000100000(bitIndex9).
% 298.77/297.00  -b000000011001000000000100000(bitIndex8).
% 298.77/297.00  -b000000011001000000000100000(bitIndex7).
% 298.77/297.00  -b000000011001000000000100000(bitIndex6).
% 298.77/297.00  b000000011001000000000100000(bitIndex5).
% 298.77/297.00  -b000000011001000000000100000(bitIndex4).
% 298.77/297.00  -b000000011001000000000100000(bitIndex3).
% 298.77/297.00  -b000000011001000000000100000(bitIndex2).
% 298.77/297.00  -b000000011001000000000100000(bitIndex1).
% 298.77/297.00  -b000000011001000000000100000(bitIndex0).
% 298.77/297.00  all VarCurr (-v1359(VarCurr)-> (v1366(VarCurr)<->v1370(VarCurr))).
% 298.77/297.00  all VarCurr (v1359(VarCurr)-> (v1366(VarCurr)<->$F)).
% 298.77/297.00  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1370(VarCurr)<->$F)).
% 298.77/297.00  all VarCurr (v1363(VarCurr)-> (v1370(VarCurr)<->v1372(VarCurr))).
% 298.77/297.00  all VarCurr (v1361(VarCurr)-> (v1370(VarCurr)<->v1371(VarCurr))).
% 298.77/297.00  all VarCurr (v1372(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.77/297.00  -b000000011101000000000000001(bitIndex26).
% 298.77/297.00  -b000000011101000000000000001(bitIndex25).
% 298.77/297.00  -b000000011101000000000000001(bitIndex24).
% 298.77/297.00  -b000000011101000000000000001(bitIndex23).
% 298.77/297.00  -b000000011101000000000000001(bitIndex22).
% 298.77/297.00  -b000000011101000000000000001(bitIndex21).
% 298.77/297.00  -b000000011101000000000000001(bitIndex20).
% 298.77/297.00  b000000011101000000000000001(bitIndex19).
% 298.77/297.00  b000000011101000000000000001(bitIndex18).
% 298.77/297.00  b000000011101000000000000001(bitIndex17).
% 298.77/297.00  -b000000011101000000000000001(bitIndex16).
% 298.77/297.00  b000000011101000000000000001(bitIndex15).
% 298.77/297.00  -b000000011101000000000000001(bitIndex14).
% 298.77/297.00  -b000000011101000000000000001(bitIndex13).
% 298.77/297.00  -b000000011101000000000000001(bitIndex12).
% 298.77/297.00  -b000000011101000000000000001(bitIndex11).
% 298.77/297.00  -b000000011101000000000000001(bitIndex10).
% 298.77/297.00  -b000000011101000000000000001(bitIndex9).
% 298.77/297.00  -b000000011101000000000000001(bitIndex8).
% 298.77/297.00  -b000000011101000000000000001(bitIndex7).
% 298.77/297.00  -b000000011101000000000000001(bitIndex6).
% 298.77/297.00  -b000000011101000000000000001(bitIndex5).
% 298.77/297.00  -b000000011101000000000000001(bitIndex4).
% 298.77/297.00  -b000000011101000000000000001(bitIndex3).
% 298.77/297.00  -b000000011101000000000000001(bitIndex2).
% 298.77/297.00  -b000000011101000000000000001(bitIndex1).
% 298.77/297.00  b000000011101000000000000001(bitIndex0).
% 298.77/297.00  all VarCurr (v1371(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$T)).
% 298.77/297.01  -b000000011001000000000000001(bitIndex26).
% 298.77/297.01  -b000000011001000000000000001(bitIndex25).
% 298.77/297.01  -b000000011001000000000000001(bitIndex24).
% 298.77/297.01  -b000000011001000000000000001(bitIndex23).
% 298.77/297.01  -b000000011001000000000000001(bitIndex22).
% 298.77/297.01  -b000000011001000000000000001(bitIndex21).
% 298.77/297.01  -b000000011001000000000000001(bitIndex20).
% 298.77/297.01  b000000011001000000000000001(bitIndex19).
% 298.77/297.01  b000000011001000000000000001(bitIndex18).
% 298.77/297.01  -b000000011001000000000000001(bitIndex17).
% 298.77/297.01  -b000000011001000000000000001(bitIndex16).
% 298.77/297.01  b000000011001000000000000001(bitIndex15).
% 298.77/297.01  -b000000011001000000000000001(bitIndex14).
% 298.77/297.01  -b000000011001000000000000001(bitIndex13).
% 298.77/297.01  -b000000011001000000000000001(bitIndex12).
% 298.77/297.01  -b000000011001000000000000001(bitIndex11).
% 298.77/297.01  -b000000011001000000000000001(bitIndex10).
% 298.77/297.01  -b000000011001000000000000001(bitIndex9).
% 298.77/297.01  -b000000011001000000000000001(bitIndex8).
% 298.77/297.01  -b000000011001000000000000001(bitIndex7).
% 298.77/297.01  -b000000011001000000000000001(bitIndex6).
% 298.77/297.01  -b000000011001000000000000001(bitIndex5).
% 298.77/297.01  -b000000011001000000000000001(bitIndex4).
% 298.77/297.01  -b000000011001000000000000001(bitIndex3).
% 298.77/297.01  -b000000011001000000000000001(bitIndex2).
% 298.77/297.01  -b000000011001000000000000001(bitIndex1).
% 298.77/297.01  b000000011001000000000000001(bitIndex0).
% 298.77/297.01  all VarCurr (-v1359(VarCurr)-> (v1296(VarCurr)<->v1360(VarCurr))).
% 298.77/297.01  all VarCurr (v1359(VarCurr)-> (v1296(VarCurr)<->$F)).
% 298.77/297.01  all VarCurr (-v1361(VarCurr)& -v1363(VarCurr)-> (v1360(VarCurr)<->$F)).
% 298.77/297.01  all VarCurr (v1363(VarCurr)-> (v1360(VarCurr)<->v1364(VarCurr))).
% 298.77/297.01  all VarCurr (v1361(VarCurr)-> (v1360(VarCurr)<->v1362(VarCurr))).
% 298.77/297.01  all VarCurr (v1364(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$T)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.77/297.01  -b000000011101000000000000000(bitIndex26).
% 298.77/297.01  -b000000011101000000000000000(bitIndex25).
% 298.77/297.01  -b000000011101000000000000000(bitIndex24).
% 298.77/297.01  -b000000011101000000000000000(bitIndex23).
% 298.77/297.01  -b000000011101000000000000000(bitIndex22).
% 298.77/297.01  -b000000011101000000000000000(bitIndex21).
% 298.77/297.01  -b000000011101000000000000000(bitIndex20).
% 298.77/297.01  b000000011101000000000000000(bitIndex19).
% 298.77/297.01  b000000011101000000000000000(bitIndex18).
% 298.77/297.01  b000000011101000000000000000(bitIndex17).
% 298.77/297.01  -b000000011101000000000000000(bitIndex16).
% 298.77/297.01  b000000011101000000000000000(bitIndex15).
% 298.77/297.01  -b000000011101000000000000000(bitIndex14).
% 298.77/297.01  -b000000011101000000000000000(bitIndex13).
% 298.77/297.01  -b000000011101000000000000000(bitIndex12).
% 298.77/297.01  -b000000011101000000000000000(bitIndex11).
% 298.77/297.01  -b000000011101000000000000000(bitIndex10).
% 298.77/297.01  -b000000011101000000000000000(bitIndex9).
% 298.77/297.01  -b000000011101000000000000000(bitIndex8).
% 298.77/297.01  -b000000011101000000000000000(bitIndex7).
% 298.77/297.01  -b000000011101000000000000000(bitIndex6).
% 298.77/297.01  -b000000011101000000000000000(bitIndex5).
% 298.77/297.01  -b000000011101000000000000000(bitIndex4).
% 298.77/297.01  -b000000011101000000000000000(bitIndex3).
% 298.77/297.01  -b000000011101000000000000000(bitIndex2).
% 298.77/297.01  -b000000011101000000000000000(bitIndex1).
% 298.77/297.01  -b000000011101000000000000000(bitIndex0).
% 298.77/297.01  all VarCurr (v1363(VarCurr)<-> (v1320(VarCurr)<->$T)).
% 298.77/297.01  all VarCurr (v1362(VarCurr)<-> (v1330(VarCurr,bitIndex26)<->$F)& (v1330(VarCurr,bitIndex25)<->$F)& (v1330(VarCurr,bitIndex24)<->$F)& (v1330(VarCurr,bitIndex23)<->$F)& (v1330(VarCurr,bitIndex22)<->$F)& (v1330(VarCurr,bitIndex21)<->$F)& (v1330(VarCurr,bitIndex20)<->$F)& (v1330(VarCurr,bitIndex19)<->$T)& (v1330(VarCurr,bitIndex18)<->$T)& (v1330(VarCurr,bitIndex17)<->$F)& (v1330(VarCurr,bitIndex16)<->$F)& (v1330(VarCurr,bitIndex15)<->$T)& (v1330(VarCurr,bitIndex14)<->$F)& (v1330(VarCurr,bitIndex13)<->$F)& (v1330(VarCurr,bitIndex12)<->$F)& (v1330(VarCurr,bitIndex11)<->$F)& (v1330(VarCurr,bitIndex10)<->$F)& (v1330(VarCurr,bitIndex9)<->$F)& (v1330(VarCurr,bitIndex8)<->$F)& (v1330(VarCurr,bitIndex7)<->$F)& (v1330(VarCurr,bitIndex6)<->$F)& (v1330(VarCurr,bitIndex5)<->$F)& (v1330(VarCurr,bitIndex4)<->$F)& (v1330(VarCurr,bitIndex3)<->$F)& (v1330(VarCurr,bitIndex2)<->$F)& (v1330(VarCurr,bitIndex1)<->$F)& (v1330(VarCurr,bitIndex0)<->$F)).
% 298.77/297.02  -b000000011001000000000000000(bitIndex26).
% 298.77/297.02  -b000000011001000000000000000(bitIndex25).
% 298.77/297.02  -b000000011001000000000000000(bitIndex24).
% 298.77/297.02  -b000000011001000000000000000(bitIndex23).
% 298.77/297.02  -b000000011001000000000000000(bitIndex22).
% 298.77/297.02  -b000000011001000000000000000(bitIndex21).
% 298.77/297.02  -b000000011001000000000000000(bitIndex20).
% 298.77/297.02  b000000011001000000000000000(bitIndex19).
% 298.77/297.02  b000000011001000000000000000(bitIndex18).
% 298.77/297.02  -b000000011001000000000000000(bitIndex17).
% 298.77/297.02  -b000000011001000000000000000(bitIndex16).
% 298.77/297.02  b000000011001000000000000000(bitIndex15).
% 298.77/297.02  -b000000011001000000000000000(bitIndex14).
% 298.77/297.02  -b000000011001000000000000000(bitIndex13).
% 298.77/297.02  -b000000011001000000000000000(bitIndex12).
% 298.77/297.02  -b000000011001000000000000000(bitIndex11).
% 298.77/297.02  -b000000011001000000000000000(bitIndex10).
% 298.77/297.02  -b000000011001000000000000000(bitIndex9).
% 298.77/297.02  -b000000011001000000000000000(bitIndex8).
% 298.77/297.02  -b000000011001000000000000000(bitIndex7).
% 298.77/297.02  -b000000011001000000000000000(bitIndex6).
% 298.77/297.02  -b000000011001000000000000000(bitIndex5).
% 298.77/297.02  -b000000011001000000000000000(bitIndex4).
% 298.77/297.02  -b000000011001000000000000000(bitIndex3).
% 298.77/297.02  -b000000011001000000000000000(bitIndex2).
% 298.77/297.02  -b000000011001000000000000000(bitIndex1).
% 298.77/297.02  -b000000011001000000000000000(bitIndex0).
% 298.77/297.02  all VarCurr (v1361(VarCurr)<-> (v1320(VarCurr)<->$F)).
% 298.77/297.02  all VarCurr (-v1359(VarCurr)<->v1298(VarCurr)).
% 298.77/297.02  all VarCurr B (range_26_0(B)-> (v1330(VarCurr,B)<->v1234(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_0(B)-> (v1234(VarCurr,B)<->v1236(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_6(B)-> (v1236(VarCurr,B)<->v1238(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_6(B)-> (v1238(VarCurr,B)<->v1240(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_6(B)-> (v1240(VarCurr,B)<->v1242(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_6(B)-> (v1242(VarCurr,B)<->v1244(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_6(B)-> (v1244(VarCurr,B)<->v1246(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_6(B)-> (v1246(VarCurr,B)<->v1248(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_6(B)-> (v1248(VarCurr,B)<->v1250(VarCurr,B))).
% 298.77/297.02  all VarCurr B (range_26_6(B)-> (v1250(VarCurr,B)<->v1252(VarCurr,B))).
% 298.77/297.02  all B (range_26_6(B)<->bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B).
% 298.77/297.02  all VarNext B (range_8_6(B)-> (v1252(VarNext,B)<->v1336(VarNext,B))).
% 298.77/297.02  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1337(VarNext)-> (all B (range_26_0(B)-> (v1336(VarNext,B)<->v1252(VarCurr,B)))))).
% 298.77/297.02  all VarNext (v1337(VarNext)-> (all B (range_26_0(B)-> (v1336(VarNext,B)<->v1353(VarNext,B))))).
% 298.77/297.02  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_26_0(B)-> (v1353(VarNext,B)<->v1351(VarCurr,B))))).
% 298.77/297.02  all VarCurr (-v1348(VarCurr)-> (all B (range_26_0(B)-> (v1351(VarCurr,B)<->v1267(VarCurr,B))))).
% 298.77/297.02  all VarCurr (v1348(VarCurr)-> (all B (range_26_0(B)-> (v1351(VarCurr,B)<->$F)))).
% 298.77/297.02  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1337(VarNext)<->v1338(VarNext)&v1347(VarNext))).
% 298.77/297.02  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1347(VarNext)<->v1345(VarCurr))).
% 298.77/297.02  all VarCurr (v1345(VarCurr)<->v1348(VarCurr)|v1349(VarCurr)).
% 298.77/297.02  all VarCurr (v1349(VarCurr)<->v1259(VarCurr,bitIndex2)&v1350(VarCurr)).
% 298.77/297.02  all VarCurr (-v1350(VarCurr)<->v1348(VarCurr)).
% 298.77/297.02  all VarCurr (-v1348(VarCurr)<->v1254(VarCurr)).
% 298.77/297.02  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1338(VarNext)<->v1339(VarNext)&v1332(VarNext))).
% 298.77/297.02  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1339(VarNext)<->v1341(VarNext))).
% 298.77/297.02  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1341(VarNext)<->v1332(VarCurr))).
% 298.84/297.03  all B (range_26_0(B)-> (v1252(constB0,B)<->$F)).
% 298.84/297.03  all B (range_26_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B).
% 298.84/297.03  -b000000000000000000000000000(bitIndex26).
% 298.84/297.03  -b000000000000000000000000000(bitIndex25).
% 298.84/297.03  -b000000000000000000000000000(bitIndex24).
% 298.84/297.03  -b000000000000000000000000000(bitIndex23).
% 298.84/297.03  -b000000000000000000000000000(bitIndex22).
% 298.84/297.03  -b000000000000000000000000000(bitIndex21).
% 298.84/297.03  -b000000000000000000000000000(bitIndex20).
% 298.84/297.03  -b000000000000000000000000000(bitIndex19).
% 298.84/297.03  -b000000000000000000000000000(bitIndex18).
% 298.84/297.03  -b000000000000000000000000000(bitIndex17).
% 298.84/297.03  -b000000000000000000000000000(bitIndex16).
% 298.84/297.03  -b000000000000000000000000000(bitIndex15).
% 298.84/297.03  -b000000000000000000000000000(bitIndex14).
% 298.84/297.03  -b000000000000000000000000000(bitIndex13).
% 298.84/297.03  -b000000000000000000000000000(bitIndex12).
% 298.84/297.03  -b000000000000000000000000000(bitIndex11).
% 298.84/297.03  -b000000000000000000000000000(bitIndex10).
% 298.84/297.03  -b000000000000000000000000000(bitIndex9).
% 298.84/297.03  -b000000000000000000000000000(bitIndex8).
% 298.84/297.03  -b000000000000000000000000000(bitIndex7).
% 298.84/297.03  -b000000000000000000000000000(bitIndex6).
% 298.84/297.03  -b000000000000000000000000000(bitIndex5).
% 298.84/297.03  -b000000000000000000000000000(bitIndex4).
% 298.84/297.03  -b000000000000000000000000000(bitIndex3).
% 298.84/297.03  -b000000000000000000000000000(bitIndex2).
% 298.84/297.03  -b000000000000000000000000000(bitIndex1).
% 298.84/297.03  -b000000000000000000000000000(bitIndex0).
% 298.84/297.03  all VarCurr (v1332(VarCurr)<->v1334(VarCurr)).
% 298.84/297.03  all VarCurr (v1334(VarCurr)<->v1(VarCurr)).
% 298.84/297.03  all VarCurr B (range_8_6(B)-> (v1267(VarCurr,B)<->v1269(VarCurr,B))).
% 298.84/297.03  all VarCurr B (range_8_6(B)-> (v1269(VarCurr,B)<->v1271(VarCurr,B))).
% 298.84/297.03  all B (range_8_6(B)<->bitIndex6=B|bitIndex7=B|bitIndex8=B).
% 298.84/297.03  all VarCurr (v1320(VarCurr)<->v1322(VarCurr)).
% 298.84/297.03  all VarCurr (v1322(VarCurr)<->v1324(VarCurr)).
% 298.84/297.03  all VarCurr (v1324(VarCurr)<->v1326(VarCurr)).
% 298.84/297.03  all VarCurr (v1326(VarCurr)<->v1328(VarCurr)).
% 298.84/297.03  all VarCurr (v1298(VarCurr)<->v1300(VarCurr)).
% 298.84/297.03  all VarCurr (v1300(VarCurr)<->v1302(VarCurr)).
% 298.84/297.03  all VarCurr (v1302(VarCurr)<->v1304(VarCurr)).
% 298.84/297.03  all VarCurr (v1304(VarCurr)<->v1306(VarCurr)).
% 298.84/297.03  all VarCurr (v1306(VarCurr)<->v1308(VarCurr)).
% 298.84/297.03  all VarCurr (v1308(VarCurr)<->v1310(VarCurr)).
% 298.84/297.03  all VarCurr (v1310(VarCurr)<->v1312(VarCurr)).
% 298.84/297.03  all VarCurr (v1312(VarCurr)<->v1314(VarCurr)).
% 298.84/297.03  all VarCurr (v1314(VarCurr)<->v1316(VarCurr)).
% 298.84/297.03  all VarCurr (v1316(VarCurr)<->v1318(VarCurr)).
% 298.84/297.03  v1318(constB0)<->$F.
% 298.84/297.03  all VarCurr ((v1265(VarCurr,bitIndex2)<->v1267(VarCurr,bitIndex31))& (v1265(VarCurr,bitIndex1)<->v1267(VarCurr,bitIndex30))& (v1265(VarCurr,bitIndex0)<->v1267(VarCurr,bitIndex29))).
% 298.84/297.03  all VarCurr B (range_31_29(B)-> (v1267(VarCurr,B)<->v1269(VarCurr,B))).
% 298.84/297.03  all VarCurr B (range_31_29(B)-> (v1269(VarCurr,B)<->v1271(VarCurr,B))).
% 298.84/297.03  all B (range_31_29(B)<->bitIndex29=B|bitIndex30=B|bitIndex31=B).
% 298.84/297.03  all B (range_31_0(B)-> (v1271(constB0,B)<->$F)).
% 298.84/297.03  all B (range_31_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex31).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex30).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex29).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex28).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex27).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex26).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex25).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex24).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex23).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex22).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex21).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex20).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex19).
% 298.84/297.03  -b00000000000000000000000000000000(bitIndex18).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex17).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex16).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex15).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex14).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex13).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex12).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex11).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex10).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex9).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex8).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex7).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex6).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex5).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex4).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex3).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex2).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex1).
% 298.84/297.05  -b00000000000000000000000000000000(bitIndex0).
% 298.84/297.05  all VarCurr (v1254(VarCurr)<->v1256(VarCurr)).
% 298.84/297.05  all VarCurr (v1256(VarCurr)<->v230(VarCurr)).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v978(VarCurr,B)<->v980(VarCurr,B))).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v980(VarCurr,B)<->v982(VarCurr,B))).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v982(VarCurr,B)<->v984(VarCurr,B))).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v984(VarCurr,B)<->v986(VarCurr,B))).
% 298.84/297.05  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1184(VarNext)-> (all B (range_5_0(B)-> (v986(VarNext,B)<->v986(VarCurr,B)))))).
% 298.84/297.05  all VarNext (v1184(VarNext)-> (all B (range_5_0(B)-> (v986(VarNext,B)<->v1198(VarNext,B))))).
% 298.84/297.05  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_5_0(B)-> (v1198(VarNext,B)<->v1196(VarCurr,B))))).
% 298.84/297.05  all VarCurr (-v1193(VarCurr)-> (all B (range_5_0(B)-> (v1196(VarCurr,B)<->v993(VarCurr,B))))).
% 298.84/297.05  all VarCurr (v1193(VarCurr)-> (all B (range_5_0(B)-> (v1196(VarCurr,B)<->$F)))).
% 298.84/297.05  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1184(VarNext)<->v1185(VarNext)&v1192(VarNext))).
% 298.84/297.05  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1192(VarNext)<->v1190(VarCurr))).
% 298.84/297.05  all VarCurr (v1190(VarCurr)<->v1193(VarCurr)|v1194(VarCurr)).
% 298.84/297.05  all VarCurr (v1194(VarCurr)<->v989(VarCurr)&v1195(VarCurr)).
% 298.84/297.05  all VarCurr (-v1195(VarCurr)<->v1193(VarCurr)).
% 298.84/297.05  all VarCurr (-v1193(VarCurr)<->v224(VarCurr)).
% 298.84/297.05  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1185(VarNext)<->v1186(VarNext)&v270(VarNext))).
% 298.84/297.05  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1186(VarNext)<->v281(VarNext))).
% 298.84/297.05  all B (range_5_0(B)-> (v986(constB0,B)<->$F)).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v993(VarCurr,B)<->v995(VarCurr,B))).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v995(VarCurr,B)<->v997(VarCurr,B))).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v997(VarCurr,B)<->v999(VarCurr,B))).
% 298.84/297.05  all VarCurr (-v1001(VarCurr)-> (v999(VarCurr,bitIndex5)<->v1101(VarCurr,bitIndex11))& (v999(VarCurr,bitIndex4)<->v1101(VarCurr,bitIndex10))& (v999(VarCurr,bitIndex3)<->v1101(VarCurr,bitIndex9))& (v999(VarCurr,bitIndex2)<->v1101(VarCurr,bitIndex8))& (v999(VarCurr,bitIndex1)<->v1101(VarCurr,bitIndex7))& (v999(VarCurr,bitIndex0)<->v1101(VarCurr,bitIndex6))).
% 298.84/297.05  all VarCurr (v1001(VarCurr)-> (all B (range_5_0(B)-> (v999(VarCurr,B)<->v1103(VarCurr,B))))).
% 298.84/297.05  all VarCurr (v1001(VarCurr)<->v1003(VarCurr)&v1180(VarCurr)).
% 298.84/297.05  all VarCurr (-v1180(VarCurr)<->v1099(VarCurr)).
% 298.84/297.05  all VarCurr (v1099(VarCurr)<-> (v1101(VarCurr,bitIndex11)<->v1103(VarCurr,bitIndex5))& (v1101(VarCurr,bitIndex10)<->v1103(VarCurr,bitIndex4))& (v1101(VarCurr,bitIndex9)<->v1103(VarCurr,bitIndex3))& (v1101(VarCurr,bitIndex8)<->v1103(VarCurr,bitIndex2))& (v1101(VarCurr,bitIndex7)<->v1103(VarCurr,bitIndex1))& (v1101(VarCurr,bitIndex6)<->v1103(VarCurr,bitIndex0))).
% 298.84/297.05  v1101(constB0,bitIndex11)<->$F.
% 298.84/297.05  v1101(constB0,bitIndex10)<->$F.
% 298.84/297.05  v1101(constB0,bitIndex9)<->$F.
% 298.84/297.05  v1101(constB0,bitIndex8)<->$F.
% 298.84/297.05  v1101(constB0,bitIndex7)<->$F.
% 298.84/297.05  v1101(constB0,bitIndex6)<->$F.
% 298.84/297.05  all B (range_5_0(B)-> (v1101(constB0,B)<->$F)).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v1103(VarCurr,B)<->v1105(VarCurr,B))).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v1105(VarCurr,B)<->v1107(VarCurr,B))).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v1107(VarCurr,B)<->v1109(VarCurr,B))).
% 298.84/297.05  all VarCurr B (range_5_0(B)-> (v1109(VarCurr,B)<->v1111(VarCurr,B))).
% 298.84/297.05  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)& -v1113(VarCurr,bitIndex8)& -v1113(VarCurr,bitIndex7)& -v1113(VarCurr,bitIndex6)& -v1113(VarCurr,bitIndex5)& -v1113(VarCurr,bitIndex4)& -v1113(VarCurr,bitIndex3)& -v1113(VarCurr,bitIndex2)& -v1113(VarCurr,bitIndex1)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->$F)))).
% 298.88/297.07  -b000000(bitIndex5).
% 298.88/297.07  -b000000(bitIndex4).
% 298.88/297.07  -b000000(bitIndex3).
% 298.88/297.07  -b000000(bitIndex2).
% 298.88/297.07  -b000000(bitIndex1).
% 298.88/297.07  -b000000(bitIndex0).
% 298.88/297.07  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)& -v1113(VarCurr,bitIndex8)& -v1113(VarCurr,bitIndex7)& -v1113(VarCurr,bitIndex6)& -v1113(VarCurr,bitIndex5)& -v1113(VarCurr,bitIndex4)& -v1113(VarCurr,bitIndex3)& -v1113(VarCurr,bitIndex2)&v1113(VarCurr,bitIndex1)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b000001(B))))).
% 298.88/297.07  -b000001(bitIndex5).
% 298.88/297.07  -b000001(bitIndex4).
% 298.88/297.07  -b000001(bitIndex3).
% 298.88/297.07  -b000001(bitIndex2).
% 298.88/297.07  -b000001(bitIndex1).
% 298.88/297.07  b000001(bitIndex0).
% 298.88/297.07  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)& -v1113(VarCurr,bitIndex8)& -v1113(VarCurr,bitIndex7)& -v1113(VarCurr,bitIndex6)& -v1113(VarCurr,bitIndex5)& -v1113(VarCurr,bitIndex4)& -v1113(VarCurr,bitIndex3)&v1113(VarCurr,bitIndex2)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b000010(B))))).
% 298.88/297.09  -b000010(bitIndex5).
% 298.88/297.09  -b000010(bitIndex4).
% 298.88/297.09  -b000010(bitIndex3).
% 298.88/297.09  -b000010(bitIndex2).
% 298.88/297.09  b000010(bitIndex1).
% 298.88/297.09  -b000010(bitIndex0).
% 298.88/297.09  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)& -v1113(VarCurr,bitIndex8)& -v1113(VarCurr,bitIndex7)& -v1113(VarCurr,bitIndex6)& -v1113(VarCurr,bitIndex5)& -v1113(VarCurr,bitIndex4)&v1113(VarCurr,bitIndex3)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b000011(B))))).
% 298.88/297.09  -b000011(bitIndex5).
% 298.88/297.09  -b000011(bitIndex4).
% 298.88/297.09  -b000011(bitIndex3).
% 298.88/297.09  -b000011(bitIndex2).
% 298.88/297.09  b000011(bitIndex1).
% 298.88/297.09  b000011(bitIndex0).
% 298.88/297.09  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)& -v1113(VarCurr,bitIndex8)& -v1113(VarCurr,bitIndex7)& -v1113(VarCurr,bitIndex6)& -v1113(VarCurr,bitIndex5)&v1113(VarCurr,bitIndex4)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b000100(B))))).
% 298.88/297.10  -b000100(bitIndex5).
% 298.88/297.10  -b000100(bitIndex4).
% 298.88/297.10  -b000100(bitIndex3).
% 298.88/297.10  b000100(bitIndex2).
% 298.88/297.10  -b000100(bitIndex1).
% 298.88/297.10  -b000100(bitIndex0).
% 298.88/297.10  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)& -v1113(VarCurr,bitIndex8)& -v1113(VarCurr,bitIndex7)& -v1113(VarCurr,bitIndex6)&v1113(VarCurr,bitIndex5)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b000101(B))))).
% 298.88/297.10  -b000101(bitIndex5).
% 298.88/297.10  -b000101(bitIndex4).
% 298.88/297.10  -b000101(bitIndex3).
% 298.88/297.10  b000101(bitIndex2).
% 298.88/297.10  -b000101(bitIndex1).
% 298.88/297.10  b000101(bitIndex0).
% 298.88/297.10  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)& -v1113(VarCurr,bitIndex8)& -v1113(VarCurr,bitIndex7)&v1113(VarCurr,bitIndex6)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b000110(B))))).
% 298.88/297.13  -b000110(bitIndex5).
% 298.88/297.13  -b000110(bitIndex4).
% 298.88/297.13  -b000110(bitIndex3).
% 298.88/297.13  b000110(bitIndex2).
% 298.88/297.13  b000110(bitIndex1).
% 298.88/297.13  -b000110(bitIndex0).
% 298.88/297.13  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)& -v1113(VarCurr,bitIndex8)&v1113(VarCurr,bitIndex7)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b000111(B))))).
% 298.88/297.13  -b000111(bitIndex5).
% 298.88/297.13  -b000111(bitIndex4).
% 298.88/297.13  -b000111(bitIndex3).
% 298.88/297.13  b000111(bitIndex2).
% 298.88/297.13  b000111(bitIndex1).
% 298.88/297.13  b000111(bitIndex0).
% 298.88/297.13  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)& -v1113(VarCurr,bitIndex9)&v1113(VarCurr,bitIndex8)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b001000(B))))).
% 298.96/297.14  -b001000(bitIndex5).
% 298.96/297.14  -b001000(bitIndex4).
% 298.96/297.14  b001000(bitIndex3).
% 298.96/297.14  -b001000(bitIndex2).
% 298.96/297.14  -b001000(bitIndex1).
% 298.96/297.14  -b001000(bitIndex0).
% 298.96/297.14  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)& -v1113(VarCurr,bitIndex10)&v1113(VarCurr,bitIndex9)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b001001(B))))).
% 298.96/297.14  -b001001(bitIndex5).
% 298.96/297.14  -b001001(bitIndex4).
% 298.96/297.14  b001001(bitIndex3).
% 298.96/297.14  -b001001(bitIndex2).
% 298.96/297.14  -b001001(bitIndex1).
% 298.96/297.14  b001001(bitIndex0).
% 298.96/297.14  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)& -v1113(VarCurr,bitIndex11)&v1113(VarCurr,bitIndex10)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b001010(B))))).
% 298.96/297.14  -b001010(bitIndex5).
% 298.96/297.14  -b001010(bitIndex4).
% 298.96/297.14  b001010(bitIndex3).
% 298.96/297.14  -b001010(bitIndex2).
% 298.96/297.14  b001010(bitIndex1).
% 298.96/297.14  -b001010(bitIndex0).
% 298.96/297.14  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)& -v1113(VarCurr,bitIndex12)&v1113(VarCurr,bitIndex11)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b001011(B))))).
% 298.96/297.17  -b001011(bitIndex5).
% 298.96/297.17  -b001011(bitIndex4).
% 298.96/297.17  b001011(bitIndex3).
% 298.96/297.17  -b001011(bitIndex2).
% 298.96/297.17  b001011(bitIndex1).
% 298.96/297.17  b001011(bitIndex0).
% 298.96/297.17  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)& -v1113(VarCurr,bitIndex13)&v1113(VarCurr,bitIndex12)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b001100(B))))).
% 298.96/297.17  -b001100(bitIndex5).
% 298.96/297.17  -b001100(bitIndex4).
% 298.96/297.17  b001100(bitIndex3).
% 298.96/297.17  b001100(bitIndex2).
% 298.96/297.17  -b001100(bitIndex1).
% 298.96/297.17  -b001100(bitIndex0).
% 298.96/297.17  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)& -v1113(VarCurr,bitIndex14)&v1113(VarCurr,bitIndex13)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b001101(B))))).
% 299.00/297.18  -b001101(bitIndex5).
% 299.00/297.18  -b001101(bitIndex4).
% 299.00/297.18  b001101(bitIndex3).
% 299.00/297.18  b001101(bitIndex2).
% 299.00/297.18  -b001101(bitIndex1).
% 299.00/297.18  b001101(bitIndex0).
% 299.00/297.18  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)& -v1113(VarCurr,bitIndex15)&v1113(VarCurr,bitIndex14)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b001110(B))))).
% 299.00/297.18  -b001110(bitIndex5).
% 299.00/297.18  -b001110(bitIndex4).
% 299.00/297.18  b001110(bitIndex3).
% 299.00/297.18  b001110(bitIndex2).
% 299.00/297.18  b001110(bitIndex1).
% 299.00/297.18  -b001110(bitIndex0).
% 299.00/297.18  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)& -v1113(VarCurr,bitIndex16)&v1113(VarCurr,bitIndex15)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b001111(B))))).
% 299.00/297.18  -b001111(bitIndex5).
% 299.00/297.18  -b001111(bitIndex4).
% 299.00/297.18  b001111(bitIndex3).
% 299.00/297.18  b001111(bitIndex2).
% 299.00/297.18  b001111(bitIndex1).
% 299.00/297.18  b001111(bitIndex0).
% 299.00/297.18  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)& -v1113(VarCurr,bitIndex17)&v1113(VarCurr,bitIndex16)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b010000(B))))).
% 299.00/297.20  -b010000(bitIndex5).
% 299.00/297.20  b010000(bitIndex4).
% 299.00/297.20  -b010000(bitIndex3).
% 299.00/297.20  -b010000(bitIndex2).
% 299.00/297.20  -b010000(bitIndex1).
% 299.00/297.20  -b010000(bitIndex0).
% 299.00/297.20  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)& -v1113(VarCurr,bitIndex18)&v1113(VarCurr,bitIndex17)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b010001(B))))).
% 299.00/297.20  -b010001(bitIndex5).
% 299.00/297.20  b010001(bitIndex4).
% 299.00/297.20  -b010001(bitIndex3).
% 299.00/297.20  -b010001(bitIndex2).
% 299.00/297.20  -b010001(bitIndex1).
% 299.00/297.20  b010001(bitIndex0).
% 299.00/297.20  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)& -v1113(VarCurr,bitIndex19)&v1113(VarCurr,bitIndex18)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b010010(B))))).
% 299.00/297.20  -b010010(bitIndex5).
% 299.00/297.20  b010010(bitIndex4).
% 299.00/297.20  -b010010(bitIndex3).
% 299.00/297.20  -b010010(bitIndex2).
% 299.00/297.22  b010010(bitIndex1).
% 299.00/297.22  -b010010(bitIndex0).
% 299.00/297.22  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)& -v1113(VarCurr,bitIndex20)&v1113(VarCurr,bitIndex19)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b010011(B))))).
% 299.00/297.22  -b010011(bitIndex5).
% 299.00/297.22  b010011(bitIndex4).
% 299.00/297.22  -b010011(bitIndex3).
% 299.00/297.22  -b010011(bitIndex2).
% 299.00/297.22  b010011(bitIndex1).
% 299.00/297.22  b010011(bitIndex0).
% 299.00/297.22  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)& -v1113(VarCurr,bitIndex21)&v1113(VarCurr,bitIndex20)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b010100(B))))).
% 299.00/297.22  -b010100(bitIndex5).
% 299.00/297.22  b010100(bitIndex4).
% 299.00/297.22  -b010100(bitIndex3).
% 299.00/297.22  b010100(bitIndex2).
% 299.00/297.22  -b010100(bitIndex1).
% 299.00/297.22  -b010100(bitIndex0).
% 299.00/297.22  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)& -v1113(VarCurr,bitIndex22)&v1113(VarCurr,bitIndex21)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b010101(B))))).
% 299.05/297.24  -b010101(bitIndex5).
% 299.05/297.24  b010101(bitIndex4).
% 299.05/297.24  -b010101(bitIndex3).
% 299.05/297.24  b010101(bitIndex2).
% 299.05/297.24  -b010101(bitIndex1).
% 299.05/297.24  b010101(bitIndex0).
% 299.05/297.24  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)& -v1113(VarCurr,bitIndex23)&v1113(VarCurr,bitIndex22)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b010110(B))))).
% 299.05/297.24  -b010110(bitIndex5).
% 299.05/297.24  b010110(bitIndex4).
% 299.05/297.24  -b010110(bitIndex3).
% 299.05/297.24  b010110(bitIndex2).
% 299.05/297.24  b010110(bitIndex1).
% 299.05/297.24  -b010110(bitIndex0).
% 299.05/297.24  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)& -v1113(VarCurr,bitIndex24)&v1113(VarCurr,bitIndex23)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b010111(B))))).
% 299.05/297.24  -b010111(bitIndex5).
% 299.05/297.24  b010111(bitIndex4).
% 299.05/297.24  -b010111(bitIndex3).
% 299.05/297.24  b010111(bitIndex2).
% 299.05/297.24  b010111(bitIndex1).
% 299.05/297.24  b010111(bitIndex0).
% 299.05/297.24  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)& -v1113(VarCurr,bitIndex25)&v1113(VarCurr,bitIndex24)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b011000(B))))).
% 299.07/297.26  -b011000(bitIndex5).
% 299.07/297.26  b011000(bitIndex4).
% 299.07/297.26  b011000(bitIndex3).
% 299.07/297.26  -b011000(bitIndex2).
% 299.07/297.26  -b011000(bitIndex1).
% 299.07/297.26  -b011000(bitIndex0).
% 299.07/297.26  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)& -v1113(VarCurr,bitIndex26)&v1113(VarCurr,bitIndex25)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b011001(B))))).
% 299.07/297.26  -b011001(bitIndex5).
% 299.07/297.26  b011001(bitIndex4).
% 299.07/297.26  b011001(bitIndex3).
% 299.07/297.26  -b011001(bitIndex2).
% 299.07/297.26  -b011001(bitIndex1).
% 299.07/297.26  b011001(bitIndex0).
% 299.07/297.26  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)& -v1113(VarCurr,bitIndex27)&v1113(VarCurr,bitIndex26)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b011010(B))))).
% 299.07/297.26  -b011010(bitIndex5).
% 299.07/297.26  b011010(bitIndex4).
% 299.07/297.26  b011010(bitIndex3).
% 299.07/297.26  -b011010(bitIndex2).
% 299.07/297.26  b011010(bitIndex1).
% 299.07/297.26  -b011010(bitIndex0).
% 299.07/297.26  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)& -v1113(VarCurr,bitIndex28)&v1113(VarCurr,bitIndex27)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b011011(B))))).
% 299.07/297.26  -b011011(bitIndex5).
% 299.07/297.26  b011011(bitIndex4).
% 299.07/297.26  b011011(bitIndex3).
% 299.07/297.26  -b011011(bitIndex2).
% 299.07/297.26  b011011(bitIndex1).
% 299.07/297.26  b011011(bitIndex0).
% 299.07/297.26  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)& -v1113(VarCurr,bitIndex29)&v1113(VarCurr,bitIndex28)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b011100(B))))).
% 299.07/297.28  -b011100(bitIndex5).
% 299.07/297.28  b011100(bitIndex4).
% 299.07/297.28  b011100(bitIndex3).
% 299.07/297.28  b011100(bitIndex2).
% 299.07/297.28  -b011100(bitIndex1).
% 299.07/297.28  -b011100(bitIndex0).
% 299.07/297.28  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)& -v1113(VarCurr,bitIndex30)&v1113(VarCurr,bitIndex29)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b011101(B))))).
% 299.07/297.28  -b011101(bitIndex5).
% 299.07/297.28  b011101(bitIndex4).
% 299.07/297.28  b011101(bitIndex3).
% 299.07/297.28  b011101(bitIndex2).
% 299.07/297.28  -b011101(bitIndex1).
% 299.07/297.28  b011101(bitIndex0).
% 299.07/297.28  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)& -v1113(VarCurr,bitIndex31)&v1113(VarCurr,bitIndex30)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b011110(B))))).
% 299.07/297.28  -b011110(bitIndex5).
% 299.07/297.28  b011110(bitIndex4).
% 299.07/297.28  b011110(bitIndex3).
% 299.07/297.28  b011110(bitIndex2).
% 299.07/297.28  b011110(bitIndex1).
% 299.07/297.28  -b011110(bitIndex0).
% 299.07/297.28  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)& -v1113(VarCurr,bitIndex32)&v1113(VarCurr,bitIndex31)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b011111(B))))).
% 299.07/297.29  -b011111(bitIndex5).
% 299.07/297.29  b011111(bitIndex4).
% 299.07/297.29  b011111(bitIndex3).
% 299.07/297.29  b011111(bitIndex2).
% 299.07/297.29  b011111(bitIndex1).
% 299.07/297.29  b011111(bitIndex0).
% 299.07/297.29  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)& -v1113(VarCurr,bitIndex33)&v1113(VarCurr,bitIndex32)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b100000(B))))).
% 299.07/297.29  b100000(bitIndex5).
% 299.07/297.29  -b100000(bitIndex4).
% 299.07/297.29  -b100000(bitIndex3).
% 299.07/297.29  -b100000(bitIndex2).
% 299.07/297.29  -b100000(bitIndex1).
% 299.07/297.29  -b100000(bitIndex0).
% 299.07/297.29  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)& -v1113(VarCurr,bitIndex34)&v1113(VarCurr,bitIndex33)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b100001(B))))).
% 299.07/297.29  b100001(bitIndex5).
% 299.07/297.29  -b100001(bitIndex4).
% 299.07/297.29  -b100001(bitIndex3).
% 299.07/297.29  -b100001(bitIndex2).
% 299.07/297.29  -b100001(bitIndex1).
% 299.07/297.29  b100001(bitIndex0).
% 299.07/297.29  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)& -v1113(VarCurr,bitIndex35)&v1113(VarCurr,bitIndex34)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b100010(B))))).
% 299.07/297.29  b100010(bitIndex5).
% 299.07/297.29  -b100010(bitIndex4).
% 299.07/297.29  -b100010(bitIndex3).
% 299.07/297.29  -b100010(bitIndex2).
% 299.07/297.29  b100010(bitIndex1).
% 299.07/297.29  -b100010(bitIndex0).
% 299.07/297.29  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)& -v1113(VarCurr,bitIndex36)&v1113(VarCurr,bitIndex35)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b100011(B))))).
% 299.07/297.31  b100011(bitIndex5).
% 299.07/297.31  -b100011(bitIndex4).
% 299.07/297.31  -b100011(bitIndex3).
% 299.07/297.31  -b100011(bitIndex2).
% 299.07/297.31  b100011(bitIndex1).
% 299.07/297.31  b100011(bitIndex0).
% 299.07/297.31  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)& -v1113(VarCurr,bitIndex37)&v1113(VarCurr,bitIndex36)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b100100(B))))).
% 299.07/297.31  b100100(bitIndex5).
% 299.07/297.31  -b100100(bitIndex4).
% 299.07/297.31  -b100100(bitIndex3).
% 299.07/297.31  b100100(bitIndex2).
% 299.07/297.31  -b100100(bitIndex1).
% 299.07/297.31  -b100100(bitIndex0).
% 299.07/297.31  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)& -v1113(VarCurr,bitIndex38)&v1113(VarCurr,bitIndex37)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b100101(B))))).
% 299.07/297.31  b100101(bitIndex5).
% 299.07/297.31  -b100101(bitIndex4).
% 299.07/297.31  -b100101(bitIndex3).
% 299.07/297.31  b100101(bitIndex2).
% 299.07/297.31  -b100101(bitIndex1).
% 299.07/297.31  b100101(bitIndex0).
% 299.07/297.31  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)& -v1113(VarCurr,bitIndex39)&v1113(VarCurr,bitIndex38)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b100110(B))))).
% 299.07/297.31  b100110(bitIndex5).
% 299.07/297.31  -b100110(bitIndex4).
% 299.07/297.31  -b100110(bitIndex3).
% 299.07/297.31  b100110(bitIndex2).
% 299.07/297.31  b100110(bitIndex1).
% 299.07/297.31  -b100110(bitIndex0).
% 299.07/297.31  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)& -v1113(VarCurr,bitIndex40)&v1113(VarCurr,bitIndex39)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b100111(B))))).
% 299.15/297.33  b100111(bitIndex5).
% 299.15/297.33  -b100111(bitIndex4).
% 299.15/297.33  -b100111(bitIndex3).
% 299.15/297.33  b100111(bitIndex2).
% 299.15/297.33  b100111(bitIndex1).
% 299.15/297.33  b100111(bitIndex0).
% 299.15/297.33  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)& -v1113(VarCurr,bitIndex41)&v1113(VarCurr,bitIndex40)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b101000(B))))).
% 299.15/297.33  b101000(bitIndex5).
% 299.15/297.33  -b101000(bitIndex4).
% 299.15/297.33  b101000(bitIndex3).
% 299.15/297.33  -b101000(bitIndex2).
% 299.15/297.33  -b101000(bitIndex1).
% 299.15/297.33  -b101000(bitIndex0).
% 299.15/297.33  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)& -v1113(VarCurr,bitIndex42)&v1113(VarCurr,bitIndex41)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b101001(B))))).
% 299.15/297.33  b101001(bitIndex5).
% 299.15/297.33  -b101001(bitIndex4).
% 299.15/297.33  b101001(bitIndex3).
% 299.15/297.33  -b101001(bitIndex2).
% 299.15/297.33  -b101001(bitIndex1).
% 299.15/297.33  b101001(bitIndex0).
% 299.15/297.33  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)& -v1113(VarCurr,bitIndex43)&v1113(VarCurr,bitIndex42)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b101010(B))))).
% 299.15/297.33  b101010(bitIndex5).
% 299.15/297.33  -b101010(bitIndex4).
% 299.15/297.33  b101010(bitIndex3).
% 299.15/297.33  -b101010(bitIndex2).
% 299.15/297.33  b101010(bitIndex1).
% 299.15/297.33  -b101010(bitIndex0).
% 299.15/297.33  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)& -v1113(VarCurr,bitIndex44)&v1113(VarCurr,bitIndex43)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b101011(B))))).
% 299.15/297.33  b101011(bitIndex5).
% 299.15/297.33  -b101011(bitIndex4).
% 299.15/297.33  b101011(bitIndex3).
% 299.15/297.33  -b101011(bitIndex2).
% 299.15/297.33  b101011(bitIndex1).
% 299.15/297.33  b101011(bitIndex0).
% 299.15/297.33  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)& -v1113(VarCurr,bitIndex45)&v1113(VarCurr,bitIndex44)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b101100(B))))).
% 299.15/297.33  b101100(bitIndex5).
% 299.15/297.33  -b101100(bitIndex4).
% 299.15/297.34  b101100(bitIndex3).
% 299.15/297.34  b101100(bitIndex2).
% 299.15/297.34  -b101100(bitIndex1).
% 299.15/297.34  -b101100(bitIndex0).
% 299.15/297.34  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)& -v1113(VarCurr,bitIndex46)&v1113(VarCurr,bitIndex45)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b101101(B))))).
% 299.15/297.34  b101101(bitIndex5).
% 299.15/297.34  -b101101(bitIndex4).
% 299.15/297.34  b101101(bitIndex3).
% 299.15/297.34  b101101(bitIndex2).
% 299.15/297.34  -b101101(bitIndex1).
% 299.15/297.34  b101101(bitIndex0).
% 299.15/297.34  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)& -v1113(VarCurr,bitIndex47)&v1113(VarCurr,bitIndex46)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b101110(B))))).
% 299.15/297.34  b101110(bitIndex5).
% 299.15/297.34  -b101110(bitIndex4).
% 299.15/297.34  b101110(bitIndex3).
% 299.15/297.34  b101110(bitIndex2).
% 299.15/297.34  b101110(bitIndex1).
% 299.15/297.34  -b101110(bitIndex0).
% 299.15/297.34  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)& -v1113(VarCurr,bitIndex48)&v1113(VarCurr,bitIndex47)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b101111(B))))).
% 299.15/297.34  b101111(bitIndex5).
% 299.15/297.34  -b101111(bitIndex4).
% 299.15/297.34  b101111(bitIndex3).
% 299.15/297.34  b101111(bitIndex2).
% 299.15/297.34  b101111(bitIndex1).
% 299.15/297.34  b101111(bitIndex0).
% 299.15/297.34  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)& -v1113(VarCurr,bitIndex49)&v1113(VarCurr,bitIndex48)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b110000(B))))).
% 299.15/297.34  b110000(bitIndex5).
% 299.15/297.34  b110000(bitIndex4).
% 299.15/297.34  -b110000(bitIndex3).
% 299.15/297.34  -b110000(bitIndex2).
% 299.15/297.34  -b110000(bitIndex1).
% 299.15/297.34  -b110000(bitIndex0).
% 299.15/297.34  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)& -v1113(VarCurr,bitIndex50)&v1113(VarCurr,bitIndex49)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b110001(B))))).
% 299.15/297.34  b110001(bitIndex5).
% 299.15/297.34  b110001(bitIndex4).
% 299.15/297.34  -b110001(bitIndex3).
% 299.15/297.34  -b110001(bitIndex2).
% 299.15/297.34  -b110001(bitIndex1).
% 299.15/297.34  b110001(bitIndex0).
% 299.15/297.34  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)& -v1113(VarCurr,bitIndex51)&v1113(VarCurr,bitIndex50)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b110010(B))))).
% 299.15/297.34  b110010(bitIndex5).
% 299.15/297.34  b110010(bitIndex4).
% 299.15/297.34  -b110010(bitIndex3).
% 299.15/297.34  -b110010(bitIndex2).
% 299.15/297.34  b110010(bitIndex1).
% 299.15/297.34  -b110010(bitIndex0).
% 299.15/297.34  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)& -v1113(VarCurr,bitIndex52)&v1113(VarCurr,bitIndex51)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b110011(B))))).
% 299.15/297.35  b110011(bitIndex5).
% 299.15/297.35  b110011(bitIndex4).
% 299.15/297.35  -b110011(bitIndex3).
% 299.15/297.35  -b110011(bitIndex2).
% 299.15/297.35  b110011(bitIndex1).
% 299.15/297.35  b110011(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)& -v1113(VarCurr,bitIndex53)&v1113(VarCurr,bitIndex52)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b110100(B))))).
% 299.15/297.35  b110100(bitIndex5).
% 299.15/297.35  b110100(bitIndex4).
% 299.15/297.35  -b110100(bitIndex3).
% 299.15/297.35  b110100(bitIndex2).
% 299.15/297.35  -b110100(bitIndex1).
% 299.15/297.35  -b110100(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)& -v1113(VarCurr,bitIndex54)&v1113(VarCurr,bitIndex53)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b110101(B))))).
% 299.15/297.35  b110101(bitIndex5).
% 299.15/297.35  b110101(bitIndex4).
% 299.15/297.35  -b110101(bitIndex3).
% 299.15/297.35  b110101(bitIndex2).
% 299.15/297.35  -b110101(bitIndex1).
% 299.15/297.35  b110101(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)& -v1113(VarCurr,bitIndex55)&v1113(VarCurr,bitIndex54)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b110110(B))))).
% 299.15/297.35  b110110(bitIndex5).
% 299.15/297.35  b110110(bitIndex4).
% 299.15/297.35  -b110110(bitIndex3).
% 299.15/297.35  b110110(bitIndex2).
% 299.15/297.35  b110110(bitIndex1).
% 299.15/297.35  -b110110(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)& -v1113(VarCurr,bitIndex56)&v1113(VarCurr,bitIndex55)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b110111(B))))).
% 299.15/297.35  b110111(bitIndex5).
% 299.15/297.35  b110111(bitIndex4).
% 299.15/297.35  -b110111(bitIndex3).
% 299.15/297.35  b110111(bitIndex2).
% 299.15/297.35  b110111(bitIndex1).
% 299.15/297.35  b110111(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)& -v1113(VarCurr,bitIndex57)&v1113(VarCurr,bitIndex56)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b111000(B))))).
% 299.15/297.35  b111000(bitIndex5).
% 299.15/297.35  b111000(bitIndex4).
% 299.15/297.35  b111000(bitIndex3).
% 299.15/297.35  -b111000(bitIndex2).
% 299.15/297.35  -b111000(bitIndex1).
% 299.15/297.35  -b111000(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)& -v1113(VarCurr,bitIndex58)&v1113(VarCurr,bitIndex57)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b111001(B))))).
% 299.15/297.35  b111001(bitIndex5).
% 299.15/297.35  b111001(bitIndex4).
% 299.15/297.35  b111001(bitIndex3).
% 299.15/297.35  -b111001(bitIndex2).
% 299.15/297.35  -b111001(bitIndex1).
% 299.15/297.35  b111001(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)& -v1113(VarCurr,bitIndex59)&v1113(VarCurr,bitIndex58)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b111010(B))))).
% 299.15/297.35  b111010(bitIndex5).
% 299.15/297.35  b111010(bitIndex4).
% 299.15/297.35  b111010(bitIndex3).
% 299.15/297.35  -b111010(bitIndex2).
% 299.15/297.35  b111010(bitIndex1).
% 299.15/297.35  -b111010(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)& -v1113(VarCurr,bitIndex60)&v1113(VarCurr,bitIndex59)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b111011(B))))).
% 299.15/297.35  b111011(bitIndex5).
% 299.15/297.35  b111011(bitIndex4).
% 299.15/297.35  b111011(bitIndex3).
% 299.15/297.35  -b111011(bitIndex2).
% 299.15/297.35  b111011(bitIndex1).
% 299.15/297.35  b111011(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)& -v1113(VarCurr,bitIndex61)&v1113(VarCurr,bitIndex60)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b111100(B))))).
% 299.15/297.35  b111100(bitIndex5).
% 299.15/297.35  b111100(bitIndex4).
% 299.15/297.35  b111100(bitIndex3).
% 299.15/297.35  b111100(bitIndex2).
% 299.15/297.35  -b111100(bitIndex1).
% 299.15/297.35  -b111100(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)& -v1113(VarCurr,bitIndex62)&v1113(VarCurr,bitIndex61)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b111101(B))))).
% 299.15/297.35  b111101(bitIndex5).
% 299.15/297.35  b111101(bitIndex4).
% 299.15/297.35  b111101(bitIndex3).
% 299.15/297.35  b111101(bitIndex2).
% 299.15/297.35  -b111101(bitIndex1).
% 299.15/297.35  b111101(bitIndex0).
% 299.15/297.35  all VarCurr (-v1113(VarCurr,bitIndex63)&v1113(VarCurr,bitIndex62)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->b111110(B))))).
% 299.15/297.35  b111110(bitIndex5).
% 299.15/297.35  b111110(bitIndex4).
% 299.15/297.35  b111110(bitIndex3).
% 299.15/297.35  b111110(bitIndex2).
% 299.15/297.35  b111110(bitIndex1).
% 299.15/297.35  -b111110(bitIndex0).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex63)-> (all B (range_5_0(B)-> (v1111(VarCurr,B)<->$T)))).
% 299.15/297.35  all B (range_5_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B).
% 299.15/297.35  b111111(bitIndex5).
% 299.15/297.35  b111111(bitIndex4).
% 299.15/297.35  b111111(bitIndex3).
% 299.15/297.35  b111111(bitIndex2).
% 299.15/297.35  b111111(bitIndex1).
% 299.15/297.35  b111111(bitIndex0).
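%
% Note: the family of axioms above encodes a 64-to-6 priority encoder:
% whenever every bit of v1113 above position k is false and bit k is true,
% v1111 is forced to the 6-bit binary value of k (the bXXXXXX constants
% spell those values out bit by bit; the bitIndex63 case uses $T for all
% six bits, i.e. 111111). A minimal Python sketch of the same semantics;
% the name msb_value is hypothetical, introduced here for illustration only:
%
%   def msb_value(bits):
%       # bits[k] models v1113(VarCurr, bitIndexK).  Scan from bit 63 down;
%       # the first true bit fixes the output, mirroring the
%       # "-v1113(...,63) & ... & v1113(...,k)" guards in the axioms above.
%       for k in range(63, -1, -1):
%           if bits[k]:
%               return [bool((k >> i) & 1) for i in range(6)]  # v1111 bits 0..5
%       return None  # the axioms above leave v1111 unconstrained here
%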
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex1)<->v1011(VarCurr,bitIndex1)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex2)<->v1011(VarCurr,bitIndex2)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex3)<->v1011(VarCurr,bitIndex3)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex4)<->v1011(VarCurr,bitIndex4)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex5)<->v1011(VarCurr,bitIndex5)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex6)<->v1011(VarCurr,bitIndex6)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex7)<->v1011(VarCurr,bitIndex7)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex8)<->v1011(VarCurr,bitIndex8)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex9)<->v1011(VarCurr,bitIndex9)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex10)<->v1011(VarCurr,bitIndex10)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex11)<->v1011(VarCurr,bitIndex11)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex12)<->v1011(VarCurr,bitIndex12)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex13)<->v1011(VarCurr,bitIndex13)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex14)<->v1011(VarCurr,bitIndex14)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex15)<->v1011(VarCurr,bitIndex15)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex16)<->v1011(VarCurr,bitIndex16)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex17)<->v1011(VarCurr,bitIndex17)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex18)<->v1011(VarCurr,bitIndex18)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex19)<->v1011(VarCurr,bitIndex19)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex20)<->v1011(VarCurr,bitIndex20)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex21)<->v1011(VarCurr,bitIndex21)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex22)<->v1011(VarCurr,bitIndex22)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex23)<->v1011(VarCurr,bitIndex23)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex24)<->v1011(VarCurr,bitIndex24)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex25)<->v1011(VarCurr,bitIndex25)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex26)<->v1011(VarCurr,bitIndex26)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex27)<->v1011(VarCurr,bitIndex27)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex28)<->v1011(VarCurr,bitIndex28)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex29)<->v1011(VarCurr,bitIndex29)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex30)<->v1011(VarCurr,bitIndex30)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex31)<->v1011(VarCurr,bitIndex31)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex32)<->v1011(VarCurr,bitIndex32)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex33)<->v1011(VarCurr,bitIndex33)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex34)<->v1011(VarCurr,bitIndex34)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex35)<->v1011(VarCurr,bitIndex35)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex36)<->v1011(VarCurr,bitIndex36)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex37)<->v1011(VarCurr,bitIndex37)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex38)<->v1011(VarCurr,bitIndex38)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex39)<->v1011(VarCurr,bitIndex39)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex40)<->v1011(VarCurr,bitIndex40)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex41)<->v1011(VarCurr,bitIndex41)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex42)<->v1011(VarCurr,bitIndex42)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex43)<->v1011(VarCurr,bitIndex43)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex44)<->v1011(VarCurr,bitIndex44)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex45)<->v1011(VarCurr,bitIndex45)).
% 299.15/297.35  all VarCurr (v1113(VarCurr,bitIndex46)<->v1011(VarCurr,bitIndex46)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex47)<->v1011(VarCurr,bitIndex47)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex48)<->v1011(VarCurr,bitIndex48)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex49)<->v1011(VarCurr,bitIndex49)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex50)<->v1011(VarCurr,bitIndex50)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex51)<->v1011(VarCurr,bitIndex51)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex52)<->v1011(VarCurr,bitIndex52)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex53)<->v1011(VarCurr,bitIndex53)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex54)<->v1011(VarCurr,bitIndex54)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex55)<->v1011(VarCurr,bitIndex55)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex56)<->v1011(VarCurr,bitIndex56)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex57)<->v1011(VarCurr,bitIndex57)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex58)<->v1011(VarCurr,bitIndex58)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex59)<->v1011(VarCurr,bitIndex59)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex60)<->v1011(VarCurr,bitIndex60)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex61)<->v1011(VarCurr,bitIndex61)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex62)<->v1011(VarCurr,bitIndex62)).
% 299.15/297.36  all VarCurr (v1113(VarCurr,bitIndex63)<->v1011(VarCurr,bitIndex63)).
% 299.15/297.36  all VarCurr (v1003(VarCurr)<->v1005(VarCurr)).
% 299.15/297.36  all VarCurr (v1005(VarCurr)<->v1007(VarCurr)).
% 299.15/297.36  all VarCurr (v1007(VarCurr)<->v1009(VarCurr)).
% 299.15/297.36  all VarCurr (v1009(VarCurr)<->v1036(VarCurr)|v1067(VarCurr)).
% 299.15/297.36  all VarCurr (v1067(VarCurr)<->v1068(VarCurr)|v1083(VarCurr)).
% 299.15/297.36  all VarCurr (v1083(VarCurr)<->v1084(VarCurr)|v1091(VarCurr)).
% 299.15/297.36  all VarCurr (v1091(VarCurr)<->v1092(VarCurr)|v1095(VarCurr)).
% 299.15/297.36  all VarCurr (v1095(VarCurr)<->v1096(VarCurr)|v1097(VarCurr)).
% 299.15/297.36  all VarCurr (v1097(VarCurr)<->v1011(VarCurr,bitIndex62)|v1011(VarCurr,bitIndex63)).
% 299.15/297.36  all VarCurr (v1096(VarCurr)<->v1011(VarCurr,bitIndex60)|v1011(VarCurr,bitIndex61)).
% 299.15/297.36  all VarCurr (v1092(VarCurr)<->v1093(VarCurr)|v1094(VarCurr)).
% 299.15/297.36  all VarCurr (v1094(VarCurr)<->v1011(VarCurr,bitIndex58)|v1011(VarCurr,bitIndex59)).
% 299.15/297.36  all VarCurr (v1093(VarCurr)<->v1011(VarCurr,bitIndex56)|v1011(VarCurr,bitIndex57)).
% 299.15/297.36  all VarCurr (v1084(VarCurr)<->v1085(VarCurr)|v1088(VarCurr)).
% 299.15/297.36  all VarCurr (v1088(VarCurr)<->v1089(VarCurr)|v1090(VarCurr)).
% 299.15/297.36  all VarCurr (v1090(VarCurr)<->v1011(VarCurr,bitIndex54)|v1011(VarCurr,bitIndex55)).
% 299.15/297.36  all VarCurr (v1089(VarCurr)<->v1011(VarCurr,bitIndex52)|v1011(VarCurr,bitIndex53)).
% 299.15/297.36  all VarCurr (v1085(VarCurr)<->v1086(VarCurr)|v1087(VarCurr)).
% 299.15/297.36  all VarCurr (v1087(VarCurr)<->v1011(VarCurr,bitIndex50)|v1011(VarCurr,bitIndex51)).
% 299.15/297.36  all VarCurr (v1086(VarCurr)<->v1011(VarCurr,bitIndex48)|v1011(VarCurr,bitIndex49)).
% 299.15/297.36  all VarCurr (v1068(VarCurr)<->v1069(VarCurr)|v1076(VarCurr)).
% 299.15/297.36  all VarCurr (v1076(VarCurr)<->v1077(VarCurr)|v1080(VarCurr)).
% 299.15/297.36  all VarCurr (v1080(VarCurr)<->v1081(VarCurr)|v1082(VarCurr)).
% 299.15/297.36  all VarCurr (v1082(VarCurr)<->v1011(VarCurr,bitIndex46)|v1011(VarCurr,bitIndex47)).
% 299.15/297.36  all VarCurr (v1081(VarCurr)<->v1011(VarCurr,bitIndex44)|v1011(VarCurr,bitIndex45)).
% 299.15/297.36  all VarCurr (v1077(VarCurr)<->v1078(VarCurr)|v1079(VarCurr)).
% 299.15/297.36  all VarCurr (v1079(VarCurr)<->v1011(VarCurr,bitIndex42)|v1011(VarCurr,bitIndex43)).
% 299.15/297.36  all VarCurr (v1078(VarCurr)<->v1011(VarCurr,bitIndex40)|v1011(VarCurr,bitIndex41)).
% 299.15/297.36  all VarCurr (v1069(VarCurr)<->v1070(VarCurr)|v1073(VarCurr)).
% 299.15/297.36  all VarCurr (v1073(VarCurr)<->v1074(VarCurr)|v1075(VarCurr)).
% 299.15/297.36  all VarCurr (v1075(VarCurr)<->v1011(VarCurr,bitIndex38)|v1011(VarCurr,bitIndex39)).
% 299.15/297.36  all VarCurr (v1074(VarCurr)<->v1011(VarCurr,bitIndex36)|v1011(VarCurr,bitIndex37)).
% 299.15/297.36  all VarCurr (v1070(VarCurr)<->v1071(VarCurr)|v1072(VarCurr)).
% 299.15/297.36  all VarCurr (v1072(VarCurr)<->v1011(VarCurr,bitIndex34)|v1011(VarCurr,bitIndex35)).
% 299.15/297.36  all VarCurr (v1071(VarCurr)<->v1011(VarCurr,bitIndex32)|v1011(VarCurr,bitIndex33)).
% 299.15/297.36  all VarCurr (v1036(VarCurr)<->v1037(VarCurr)|v1052(VarCurr)).
% 299.15/297.36  all VarCurr (v1052(VarCurr)<->v1053(VarCurr)|v1060(VarCurr)).
% 299.15/297.36  all VarCurr (v1060(VarCurr)<->v1061(VarCurr)|v1064(VarCurr)).
% 299.15/297.36  all VarCurr (v1064(VarCurr)<->v1065(VarCurr)|v1066(VarCurr)).
% 299.15/297.36  all VarCurr (v1066(VarCurr)<->v1011(VarCurr,bitIndex30)|v1011(VarCurr,bitIndex31)).
% 299.15/297.36  all VarCurr (v1065(VarCurr)<->v1011(VarCurr,bitIndex28)|v1011(VarCurr,bitIndex29)).
% 299.15/297.36  all VarCurr (v1061(VarCurr)<->v1062(VarCurr)|v1063(VarCurr)).
% 299.15/297.37  all VarCurr (v1063(VarCurr)<->v1011(VarCurr,bitIndex26)|v1011(VarCurr,bitIndex27)).
% 299.15/297.37  all VarCurr (v1062(VarCurr)<->v1011(VarCurr,bitIndex24)|v1011(VarCurr,bitIndex25)).
% 299.15/297.37  all VarCurr (v1053(VarCurr)<->v1054(VarCurr)|v1057(VarCurr)).
% 299.15/297.37  all VarCurr (v1057(VarCurr)<->v1058(VarCurr)|v1059(VarCurr)).
% 299.15/297.37  all VarCurr (v1059(VarCurr)<->v1011(VarCurr,bitIndex22)|v1011(VarCurr,bitIndex23)).
% 299.15/297.37  all VarCurr (v1058(VarCurr)<->v1011(VarCurr,bitIndex20)|v1011(VarCurr,bitIndex21)).
% 299.15/297.37  all VarCurr (v1054(VarCurr)<->v1055(VarCurr)|v1056(VarCurr)).
% 299.15/297.37  all VarCurr (v1056(VarCurr)<->v1011(VarCurr,bitIndex18)|v1011(VarCurr,bitIndex19)).
% 299.15/297.37  all VarCurr (v1055(VarCurr)<->v1011(VarCurr,bitIndex16)|v1011(VarCurr,bitIndex17)).
% 299.15/297.37  all VarCurr (v1037(VarCurr)<->v1038(VarCurr)|v1045(VarCurr)).
% 299.15/297.37  all VarCurr (v1045(VarCurr)<->v1046(VarCurr)|v1049(VarCurr)).
% 299.15/297.37  all VarCurr (v1049(VarCurr)<->v1050(VarCurr)|v1051(VarCurr)).
% 299.15/297.37  all VarCurr (v1051(VarCurr)<->v1011(VarCurr,bitIndex14)|v1011(VarCurr,bitIndex15)).
% 299.15/297.37  all VarCurr (v1050(VarCurr)<->v1011(VarCurr,bitIndex12)|v1011(VarCurr,bitIndex13)).
% 299.15/297.37  all VarCurr (v1046(VarCurr)<->v1047(VarCurr)|v1048(VarCurr)).
% 299.15/297.37  all VarCurr (v1048(VarCurr)<->v1011(VarCurr,bitIndex10)|v1011(VarCurr,bitIndex11)).
% 299.15/297.37  all VarCurr (v1047(VarCurr)<->v1011(VarCurr,bitIndex8)|v1011(VarCurr,bitIndex9)).
% 299.15/297.37  all VarCurr (v1038(VarCurr)<->v1039(VarCurr)|v1042(VarCurr)).
% 299.15/297.37  all VarCurr (v1042(VarCurr)<->v1043(VarCurr)|v1044(VarCurr)).
% 299.15/297.37  all VarCurr (v1044(VarCurr)<->v1011(VarCurr,bitIndex6)|v1011(VarCurr,bitIndex7)).
% 299.15/297.37  all VarCurr (v1043(VarCurr)<->v1011(VarCurr,bitIndex4)|v1011(VarCurr,bitIndex5)).
% 299.15/297.37  all VarCurr (v1039(VarCurr)<->v1040(VarCurr)|v1041(VarCurr)).
% 299.15/297.37  all VarCurr (v1041(VarCurr)<->v1011(VarCurr,bitIndex2)|v1011(VarCurr,bitIndex3)).
% 299.15/297.37  all VarCurr (v1040(VarCurr)<->v1011(VarCurr,bitIndex0)|v1011(VarCurr,bitIndex1)).
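%
% Note: v1009 is defined above as a balanced OR-reduction tree over the 64
% bits of v1011: the leaves OR adjacent bit pairs (e.g. v1040 is bit0|bit1),
% internal nodes (v1039, v1038, ..., v1036, v1067) OR pairs of subtrees, and
% the root v1009 is therefore true iff some bit of v1011 is true. A minimal
% sketch of the same reduction, assuming a plain list of booleans; the name
% or_reduce is hypothetical:
%
%   def or_reduce(bits):
%       # OR adjacent pairs layer by layer, as in the v10xx tree above.
%       while len(bits) > 1:
%           bits = [a | b for a, b in zip(bits[0::2], bits[1::2])]
%       return bits[0]
%
%   # or_reduce of [v1011(bitIndex0), ..., v1011(bitIndex63)] plays the
%   # role of v1009(VarCurr).
%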
% 299.15/297.37  all VarCurr B (range_63_0(B)-> (v1011(VarCurr,B)<->v1013(VarCurr,B))).
% 299.15/297.37  all VarCurr B (range_63_0(B)-> (v1013(VarCurr,B)<->v1015(VarCurr,B))).
% 299.15/297.37  all VarCurr B (range_63_0(B)-> (v1015(VarCurr,B)<->v1017(VarCurr,B))).
% 299.15/297.37  all VarCurr B (range_63_0(B)-> (v1017(VarCurr,B)<->v1019(VarCurr,B)|v1032(VarCurr,B))).
% 299.15/297.37  all B (range_63_0(B)-> (v1032(constB0,B)<->$F)).
% 299.15/297.37  all VarCurr B (range_63_0(B)-> (v1019(VarCurr,B)<->v1021(VarCurr,B)&v1023(VarCurr,B))).
% 299.15/297.37  all B (range_63_0(B)-> (v1021(constB0,B)<->$F)).
% 299.15/297.37  all VarCurr B (range_63_0(B)-> (v1023(VarCurr,B)<->v1025(VarCurr,B))).
% 299.15/297.37  all VarCurr B (range_63_0(B)-> (v1025(VarCurr,B)<->v1027(VarCurr,B))).
% 299.15/297.37  all VarCurr B (range_63_0(B)-> (v1027(VarCurr,B)<->v1029(VarCurr,B))).
% 299.15/297.37  v1029(constB0,bitIndex63)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex62)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex61)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex60)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex59)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex58)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex57)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex56)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex55)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex54)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex53)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex52)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex51)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex50)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex49)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex48)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex47)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex46)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex45)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex44)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex43)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex42)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex41)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex40)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex39)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex38)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex37)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex36)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex35)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex34)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex33)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex32)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex31)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex30)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex29)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex28)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex27)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex26)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex25)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex24)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex23)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex22)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex21)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex20)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex19)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex18)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex17)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex16)<->$F.
% 299.15/297.37  v1029(constB0,bitIndex15)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex14)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex13)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex12)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex11)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex10)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex9)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex8)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex7)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex6)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex5)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex4)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex3)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex2)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex1)<->$F.
% 299.21/297.38  v1029(constB0,bitIndex0)<->$F.
% 299.21/297.38  all VarCurr (-v991(VarCurr)-> (v989(VarCurr)<->$F)).
% 299.21/297.38  all VarCurr (v991(VarCurr)-> (v989(VarCurr)<->$T)).
% 299.21/297.38  all VarCurr (v991(VarCurr)<-> (v237(VarCurr,bitIndex2)<->$F)& (v237(VarCurr,bitIndex1)<->$F)& (v237(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v969(VarCurr)<->v971(VarCurr)).
% 299.21/297.38  all VarCurr (v971(VarCurr)<->v973(VarCurr)).
% 299.21/297.38  all VarCurr (v973(VarCurr)<->v975(VarCurr)).
% 299.21/297.38  all VarCurr (v975(VarCurr)<->v222(VarCurr,bitIndex0)|v222(VarCurr,bitIndex1)).
% 299.21/297.38  all VarCurr (v807(VarCurr)<->v809(VarCurr)).
% 299.21/297.38  all VarCurr (v809(VarCurr)<->v811(VarCurr)).
% 299.21/297.38  all VarCurr (v811(VarCurr)<->v813(VarCurr)).
% 299.21/297.38  all VarCurr (v813(VarCurr)<->v815(VarCurr,bitIndex1)).
% 299.21/297.38  all VarCurr (v815(VarCurr,bitIndex1)<->v933(VarCurr,bitIndex1)).
% 299.21/297.38  all VarCurr (-v934(VarCurr)& -v942(VarCurr)-> (all B (range_2_0(B)-> (v933(VarCurr,B)<->v950(VarCurr,B))))).
% 299.21/297.38  all VarCurr (v942(VarCurr)-> (all B (range_2_0(B)-> (v933(VarCurr,B)<->v943(VarCurr,B))))).
% 299.21/297.38  all VarCurr (v934(VarCurr)-> (all B (range_2_0(B)-> (v933(VarCurr,B)<->v937(VarCurr,B))))).
% 299.21/297.38  all VarCurr (-v951(VarCurr)-> (all B (range_2_0(B)-> (v950(VarCurr,B)<->$T)))).
% 299.21/297.38  all VarCurr (v951(VarCurr)-> (all B (range_2_0(B)-> (v950(VarCurr,B)<->$F)))).
% 299.21/297.38  all VarCurr (v952(VarCurr)<->v954(VarCurr)|v957(VarCurr)).
% 299.21/297.38  all VarCurr (v957(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$T)& (v819(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v954(VarCurr)<->v955(VarCurr)|v956(VarCurr)).
% 299.21/297.38  all VarCurr (v956(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$T)& (v819(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v955(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$F)& (v819(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v951(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$F)& (v819(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v949(VarCurr)<-> (v817(VarCurr,bitIndex1)<->$T)& (v817(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (-v944(VarCurr)& -v947(VarCurr)-> (all B (range_2_0(B)-> (v943(VarCurr,B)<->b011(B))))).
% 299.21/297.38  all VarCurr (v947(VarCurr)-> (all B (range_2_0(B)-> (v943(VarCurr,B)<->$T)))).
% 299.21/297.38  all VarCurr (v944(VarCurr)-> (all B (range_2_0(B)-> (v943(VarCurr,B)<->$F)))).
% 299.21/297.38  all VarCurr (v948(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$T)& (v819(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v947(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$T)& (v819(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v944(VarCurr)<->v945(VarCurr)|v946(VarCurr)).
% 299.21/297.38  all VarCurr (v946(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$F)& (v819(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v945(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$F)& (v819(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v942(VarCurr)<-> (v817(VarCurr,bitIndex1)<->$T)& (v817(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (-v938(VarCurr)& -v939(VarCurr)& -v940(VarCurr)-> (all B (range_2_0(B)-> (v937(VarCurr,B)<->$T)))).
% 299.21/297.38  all VarCurr (v940(VarCurr)-> (all B (range_2_0(B)-> (v937(VarCurr,B)<->b011(B))))).
% 299.21/297.38  all VarCurr (v939(VarCurr)-> (all B (range_2_0(B)-> (v937(VarCurr,B)<->$T)))).
% 299.21/297.38  all VarCurr (v938(VarCurr)-> (all B (range_2_0(B)-> (v937(VarCurr,B)<->$F)))).
% 299.21/297.38  all VarCurr (v941(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$T)& (v819(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v940(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$T)& (v819(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v939(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$F)& (v819(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v938(VarCurr)<-> (v819(VarCurr,bitIndex1)<->$F)& (v819(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v934(VarCurr)<->v935(VarCurr)|v936(VarCurr)).
% 299.21/297.38  all VarCurr (v936(VarCurr)<-> (v817(VarCurr,bitIndex1)<->$F)& (v817(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v935(VarCurr)<-> (v817(VarCurr,bitIndex1)<->$F)& (v817(VarCurr,bitIndex0)<->$F)).
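Taken together, the block from v933 down to v935 reads as a nested case expression: the 2-bit register v817 picks one of three branch tables (v937, v943, v950), and each table is itself indexed by the 2-bit value v819. A sketch under the assumption that buses are small ints (so b011 is 0b011, $T a bus of ones, $F a bus of zeros); note that only bit 1 of the result is consumed upstream through v815:

    def v933(v817, v819):
        if v817 in (0b00, 0b01):                    # v934 = v935 | v936
            return {0b00: 0b000, 0b01: 0b111,
                    0b10: 0b011}.get(v819, 0b111)   # v937
        if v817 == 0b10:                            # v942
            return {0b00: 0b000, 0b01: 0b000,
                    0b10: 0b111}.get(v819, 0b011)   # v943
        return 0b000 if v819 == 0b00 else 0b111     # v950, v817 == 11

    assert v933(0b11, 0b01) == 0b111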
% 299.21/297.38  all VarCurr (-v893(VarCurr)& -v903(VarCurr)& -v913(VarCurr)-> (all B (range_1_0(B)-> (v819(VarCurr,B)<->v924(VarCurr,B))))).
% 299.21/297.38  all VarCurr (v913(VarCurr)-> (all B (range_1_0(B)-> (v819(VarCurr,B)<->v914(VarCurr,B))))).
% 299.21/297.38  all VarCurr (v903(VarCurr)-> (all B (range_1_0(B)-> (v819(VarCurr,B)<->v904(VarCurr,B))))).
% 299.21/297.38  all VarCurr (v893(VarCurr)-> (all B (range_1_0(B)-> (v819(VarCurr,B)<->v894(VarCurr,B))))).
% 299.21/297.38  all VarCurr (-v925(VarCurr)& -v927(VarCurr)& -v929(VarCurr)-> (all B (range_1_0(B)-> (v924(VarCurr,B)<->b10(B))))).
% 299.21/297.38  all VarCurr (v929(VarCurr)-> (all B (range_1_0(B)-> (v924(VarCurr,B)<->$T)))).
% 299.21/297.38  all VarCurr (v927(VarCurr)-> (all B (range_1_0(B)-> (v924(VarCurr,B)<->$F)))).
% 299.21/297.38  all VarCurr (v925(VarCurr)-> (all B (range_1_0(B)-> (v924(VarCurr,B)<->b01(B))))).
% 299.21/297.38  all VarCurr (v931(VarCurr)<-> (v932(VarCurr,bitIndex1)<->$T)& (v932(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v932(VarCurr,bitIndex0)<->v891(VarCurr)).
% 299.21/297.38  all VarCurr (v932(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v929(VarCurr)<-> (v930(VarCurr,bitIndex1)<->$T)& (v930(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v930(VarCurr,bitIndex0)<->v891(VarCurr)).
% 299.21/297.38  all VarCurr (v930(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v927(VarCurr)<-> (v928(VarCurr,bitIndex1)<->$F)& (v928(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v928(VarCurr,bitIndex0)<->v891(VarCurr)).
% 299.21/297.38  all VarCurr (v928(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v925(VarCurr)<-> (v926(VarCurr,bitIndex1)<->$F)& (v926(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v926(VarCurr,bitIndex0)<->v891(VarCurr)).
% 299.21/297.38  all VarCurr (v926(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v923(VarCurr)<-> (v817(VarCurr,bitIndex1)<->$T)& (v817(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (-v915(VarCurr)& -v917(VarCurr)& -v919(VarCurr)-> (all B (range_1_0(B)-> (v914(VarCurr,B)<->$T)))).
% 299.21/297.38  all VarCurr (v919(VarCurr)-> (all B (range_1_0(B)-> (v914(VarCurr,B)<->b10(B))))).
% 299.21/297.38  all VarCurr (v917(VarCurr)-> (all B (range_1_0(B)-> (v914(VarCurr,B)<->b01(B))))).
% 299.21/297.38  all VarCurr (v915(VarCurr)-> (all B (range_1_0(B)-> (v914(VarCurr,B)<->$F)))).
% 299.21/297.38  all VarCurr (v921(VarCurr)<-> (v922(VarCurr,bitIndex1)<->$T)& (v922(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v922(VarCurr,bitIndex0)<->v889(VarCurr)).
% 299.21/297.38  all VarCurr (v922(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v919(VarCurr)<-> (v920(VarCurr,bitIndex1)<->$T)& (v920(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v920(VarCurr,bitIndex0)<->v889(VarCurr)).
% 299.21/297.38  all VarCurr (v920(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v917(VarCurr)<-> (v918(VarCurr,bitIndex1)<->$F)& (v918(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v918(VarCurr,bitIndex0)<->v889(VarCurr)).
% 299.21/297.38  all VarCurr (v918(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v915(VarCurr)<-> (v916(VarCurr,bitIndex1)<->$F)& (v916(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v916(VarCurr,bitIndex0)<->v889(VarCurr)).
% 299.21/297.38  all VarCurr (v916(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v913(VarCurr)<-> (v817(VarCurr,bitIndex1)<->$T)& (v817(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (-v905(VarCurr)& -v907(VarCurr)& -v909(VarCurr)-> (all B (range_1_0(B)-> (v904(VarCurr,B)<->b10(B))))).
% 299.21/297.38  all VarCurr (v909(VarCurr)-> (all B (range_1_0(B)-> (v904(VarCurr,B)<->$T)))).
% 299.21/297.38  all VarCurr (v907(VarCurr)-> (all B (range_1_0(B)-> (v904(VarCurr,B)<->$F)))).
% 299.21/297.38  all VarCurr (v905(VarCurr)-> (all B (range_1_0(B)-> (v904(VarCurr,B)<->b01(B))))).
% 299.21/297.38  all VarCurr (v911(VarCurr)<-> (v912(VarCurr,bitIndex1)<->$T)& (v912(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v912(VarCurr,bitIndex0)<->v891(VarCurr)).
% 299.21/297.38  all VarCurr (v912(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v909(VarCurr)<-> (v910(VarCurr,bitIndex1)<->$T)& (v910(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v910(VarCurr,bitIndex0)<->v891(VarCurr)).
% 299.21/297.38  all VarCurr (v910(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v907(VarCurr)<-> (v908(VarCurr,bitIndex1)<->$F)& (v908(VarCurr,bitIndex0)<->$T)).
% 299.21/297.38  all VarCurr (v908(VarCurr,bitIndex0)<->v891(VarCurr)).
% 299.21/297.38  all VarCurr (v908(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  all VarCurr (v905(VarCurr)<-> (v906(VarCurr,bitIndex1)<->$F)& (v906(VarCurr,bitIndex0)<->$F)).
% 299.21/297.38  all VarCurr (v906(VarCurr,bitIndex0)<->v891(VarCurr)).
% 299.21/297.38  all VarCurr (v906(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.38  v891(constB0)<->$F.
% 299.21/297.38  all VarCurr (v903(VarCurr)<-> (v817(VarCurr,bitIndex1)<->$F)& (v817(VarCurr,bitIndex0)<->$T)).
% 299.21/297.39  all VarCurr (-v895(VarCurr)& -v897(VarCurr)& -v899(VarCurr)-> (all B (range_1_0(B)-> (v894(VarCurr,B)<->$T)))).
% 299.21/297.39  all VarCurr (v899(VarCurr)-> (all B (range_1_0(B)-> (v894(VarCurr,B)<->b10(B))))).
% 299.21/297.39  all VarCurr (v897(VarCurr)-> (all B (range_1_0(B)-> (v894(VarCurr,B)<->b01(B))))).
% 299.21/297.39  all VarCurr (v895(VarCurr)-> (all B (range_1_0(B)-> (v894(VarCurr,B)<->$F)))).
% 299.21/297.39  all VarCurr (v901(VarCurr)<-> (v902(VarCurr,bitIndex1)<->$T)& (v902(VarCurr,bitIndex0)<->$T)).
% 299.21/297.39  all VarCurr (v902(VarCurr,bitIndex0)<->v889(VarCurr)).
% 299.21/297.39  all VarCurr (v902(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.39  all VarCurr (v899(VarCurr)<-> (v900(VarCurr,bitIndex1)<->$T)& (v900(VarCurr,bitIndex0)<->$F)).
% 299.21/297.39  all VarCurr (v900(VarCurr,bitIndex0)<->v889(VarCurr)).
% 299.21/297.39  all VarCurr (v900(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.39  all VarCurr (v897(VarCurr)<-> (v898(VarCurr,bitIndex1)<->$F)& (v898(VarCurr,bitIndex0)<->$T)).
% 299.21/297.39  all VarCurr (v898(VarCurr,bitIndex0)<->v889(VarCurr)).
% 299.21/297.39  all VarCurr (v898(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.39  all VarCurr (v895(VarCurr)<-> (v896(VarCurr,bitIndex1)<->$F)& (v896(VarCurr,bitIndex0)<->$F)).
% 299.21/297.39  all VarCurr (v896(VarCurr,bitIndex0)<->v889(VarCurr)).
% 299.21/297.39  all VarCurr (v896(VarCurr,bitIndex1)<->v821(VarCurr)).
% 299.21/297.39  v889(constB0)<->$F.
% 299.21/297.39  all VarCurr (v893(VarCurr)<-> (v817(VarCurr,bitIndex1)<->$F)& (v817(VarCurr,bitIndex0)<->$F)).
% 299.21/297.39  all B (range_1_0(B)-> (v817(constB0,B)<->$F)).
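The four case blocks from v893 up to v924 define the 2-bit value v819 as a function of the register v817 (reset to 00 at constB0) and a 2-bit selector whose high bit is always v821; the low bit is v889 in the v817 = 00 and 10 blocks and v891 in the 01 and 11 blocks (both reset to F). Tabulated as a sketch, buses as ints:

    # TABLE[v817][(v821 << 1) | lsb], derived from the vNNN guards above
    TABLE = {
        0b00: {0b00: 0b00, 0b01: 0b01, 0b10: 0b10, 0b11: 0b11},  # v894
        0b01: {0b00: 0b01, 0b01: 0b00, 0b10: 0b11, 0b11: 0b10},  # v904
        0b10: {0b00: 0b00, 0b01: 0b01, 0b10: 0b10, 0b11: 0b11},  # v914
        0b11: {0b00: 0b01, 0b01: 0b00, 0b10: 0b11, 0b11: 0b10},  # v924
    }

    def v819(v817, v821, v889, v891):
        lsb = v889 if v817 in (0b00, 0b10) else v891
        return TABLE[v817][(int(v821) << 1) | int(lsb)]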
% 299.21/297.39  all VarCurr (v821(VarCurr)<->v823(VarCurr)).
% 299.21/297.39  all VarCurr (v823(VarCurr)<->v825(VarCurr)).
% 299.21/297.39  all VarCurr (v825(VarCurr)<->v827(VarCurr)).
% 299.21/297.39  all VarCurr (v827(VarCurr)<->v829(VarCurr)).
% 299.21/297.39  all VarCurr (-v876(VarCurr)-> (v829(VarCurr)<->v877(VarCurr))).
% 299.21/297.39  all VarCurr (v876(VarCurr)-> (v829(VarCurr)<->$F)).
% 299.21/297.39  all VarCurr (-v878(VarCurr)& -v879(VarCurr)& -v882(VarCurr)& -v883(VarCurr)& -v884(VarCurr)-> (v877(VarCurr)<->v870(VarCurr,bitIndex1))).
% 299.21/297.39  all VarCurr (v884(VarCurr)-> (v877(VarCurr)<->v870(VarCurr,bitIndex2))).
% 299.21/297.39  all VarCurr (v883(VarCurr)-> (v877(VarCurr)<->v870(VarCurr,bitIndex3))).
% 299.21/297.39  all VarCurr (v882(VarCurr)-> (v877(VarCurr)<->v870(VarCurr,bitIndex2))).
% 299.21/297.39  all VarCurr (v879(VarCurr)-> (v877(VarCurr)<->v870(VarCurr,bitIndex3))).
% 299.21/297.39  all VarCurr (v878(VarCurr)-> (v877(VarCurr)<->$F)).
% 299.21/297.39  all VarCurr (v887(VarCurr)<-> (v858(VarCurr,bitIndex2)<->$T)& (v858(VarCurr,bitIndex1)<->$T)& (v858(VarCurr,bitIndex0)<->$T)).
% 299.21/297.39  all VarCurr (v884(VarCurr)<->v885(VarCurr)|v886(VarCurr)).
% 299.21/297.39  all VarCurr (v886(VarCurr)<-> (v858(VarCurr,bitIndex2)<->$T)& (v858(VarCurr,bitIndex1)<->$T)& (v858(VarCurr,bitIndex0)<->$F)).
% 299.21/297.39  all VarCurr (v885(VarCurr)<-> (v858(VarCurr,bitIndex2)<->$T)& (v858(VarCurr,bitIndex1)<->$F)& (v858(VarCurr,bitIndex0)<->$T)).
% 299.21/297.39  all VarCurr (v883(VarCurr)<-> (v858(VarCurr,bitIndex2)<->$T)& (v858(VarCurr,bitIndex1)<->$F)& (v858(VarCurr,bitIndex0)<->$F)).
% 299.21/297.39  all VarCurr (v882(VarCurr)<-> (v858(VarCurr,bitIndex2)<->$F)& (v858(VarCurr,bitIndex1)<->$T)& (v858(VarCurr,bitIndex0)<->$T)).
% 299.21/297.39  all VarCurr (v879(VarCurr)<->v880(VarCurr)|v881(VarCurr)).
% 299.21/297.39  all VarCurr (v881(VarCurr)<-> (v858(VarCurr,bitIndex2)<->$F)& (v858(VarCurr,bitIndex1)<->$T)& (v858(VarCurr,bitIndex0)<->$F)).
% 299.21/297.39  all VarCurr (v880(VarCurr)<-> (v858(VarCurr,bitIndex2)<->$F)& (v858(VarCurr,bitIndex1)<->$F)& (v858(VarCurr,bitIndex0)<->$T)).
% 299.21/297.39  all VarCurr (v878(VarCurr)<-> (v858(VarCurr,bitIndex2)<->$F)& (v858(VarCurr,bitIndex1)<->$F)& (v858(VarCurr,bitIndex0)<->$F)).
% 299.21/297.39  all VarCurr (-v876(VarCurr)<->v831(VarCurr)).
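Read together, v829 is v877 gated by v831 (via -v876 <-> v831), and v877 selects a single bit of the bus v870 according to the 3-bit index v858, with index 000 pinned to F. A sketch of that selection:

    def v877(v858, v870):
        # v870 given as a dict of bits {1: .., 2: .., 3: ..}; v858 an int
        pick = {0b000: False,                      # v878
                0b001: v870[3], 0b010: v870[3],    # v879 = v880 | v881
                0b011: v870[2],                    # v882
                0b100: v870[3],                    # v883
                0b101: v870[2], 0b110: v870[2]}    # v884 = v885 | v886
        return pick.get(v858, v870[1])             # 111 and the default

    def v829(v831, v858, v870):
        return v877(v858, v870) if v831 else False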
% 299.21/297.39  all VarCurr (v870(VarCurr,bitIndex1)<->v872(VarCurr,bitIndex1)).
% 299.21/297.39  all VarCurr (v872(VarCurr,bitIndex1)<->v874(VarCurr,bitIndex1)).
% 299.21/297.39  all VarCurr (v870(VarCurr,bitIndex2)<->v872(VarCurr,bitIndex2)).
% 299.21/297.39  all VarCurr (v872(VarCurr,bitIndex2)<->v874(VarCurr,bitIndex2)).
% 299.21/297.39  all VarCurr (v870(VarCurr,bitIndex3)<->v872(VarCurr,bitIndex3)).
% 299.21/297.39  all VarCurr (v872(VarCurr,bitIndex3)<->v874(VarCurr,bitIndex3)).
% 299.21/297.39  all B (range_3_0(B)-> (v874(constB0,B)<->$F)).
% 299.21/297.39  all VarCurr B (range_2_0(B)-> (v858(VarCurr,B)<->v860(VarCurr,B))).
% 299.21/297.39  all VarCurr B (range_2_0(B)-> (v860(VarCurr,B)<->v862(VarCurr,B))).
% 299.21/297.39  all VarCurr B (range_2_0(B)-> (v862(VarCurr,B)<->v864(VarCurr,B))).
% 299.21/297.39  all VarCurr B (range_2_0(B)-> (v864(VarCurr,B)<->v866(VarCurr,B))).
% 299.21/297.39  all VarCurr ((v866(VarCurr,bitIndex2)<->v868(VarCurr,bitIndex3))& (v866(VarCurr,bitIndex1)<->v868(VarCurr,bitIndex2))& (v866(VarCurr,bitIndex0)<->v868(VarCurr,bitIndex1))).
% 299.21/297.40  v868(constB0,bitIndex3)<->$F.
% 299.21/297.40  v868(constB0,bitIndex2)<->$F.
% 299.21/297.40  v868(constB0,bitIndex1)<->$F.
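The three biconditionals for v866 restate a one-bit right slice, v866[2:0] = v868[3:1], with v868 reset to zero at constB0. Over integers:

    def v866(v868):
        return (v868 >> 1) & 0b111   # keep bits 3..1, drop bit 0

    assert v866(0b1010) == 0b101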
% 299.21/297.40  all VarCurr (v831(VarCurr)<->v833(VarCurr)).
% 299.21/297.40  all VarCurr (v833(VarCurr)<->v835(VarCurr)).
% 299.21/297.40  all VarCurr (v835(VarCurr)<->v837(VarCurr)).
% 299.21/297.40  all VarCurr (v837(VarCurr)<->v839(VarCurr)).
% 299.21/297.40  all VarCurr (v839(VarCurr)<->v841(VarCurr)).
% 299.21/297.40  all VarCurr (v841(VarCurr)<->v843(VarCurr)).
% 299.21/297.40  all VarCurr (v843(VarCurr)<->v845(VarCurr)).
% 299.21/297.40  all VarCurr (v845(VarCurr)<->v847(VarCurr)).
% 299.21/297.40  all VarCurr (v847(VarCurr)<->v849(VarCurr)).
% 299.21/297.40  all VarCurr (v849(VarCurr)<->v851(VarCurr)).
% 299.21/297.40  all VarCurr (v851(VarCurr)<->v853(VarCurr)).
% 299.21/297.40  all VarCurr (v853(VarCurr)<->v855(VarCurr,bitIndex2)).
% 299.21/297.40  all B (range_3_0(B)-> (v855(constB0,B)<->$F)).
% 299.21/297.40  all B (range_3_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B).
% 299.21/297.40  -b0000(bitIndex3).
% 299.21/297.40  -b0000(bitIndex2).
% 299.21/297.40  -b0000(bitIndex1).
% 299.21/297.40  -b0000(bitIndex0).
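range_3_0 and the b0000 facts illustrate the file's uniform encoding: a range predicate enumerates the legal bit indices of a bus, and a named constant such as b0000 fixes the value of every bit. As a sketch:

    RANGE_3_0 = {0, 1, 2, 3}           # range_3_0(B) <-> B in {0..3}

    def b0000(i):
        assert i in RANGE_3_0
        return False                   # -b0000(bitIndexN) for each N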
% 299.21/297.40  all VarCurr (v805(VarCurr)<->v228(VarCurr)).
% 299.21/297.40  all VarCurr (v775(VarCurr,bitIndex8)<->v777(VarCurr,bitIndex8)).
% 299.21/297.40  all VarCurr (v777(VarCurr,bitIndex8)<->v779(VarCurr,bitIndex8)).
% 299.21/297.40  all VarCurr (v767(VarCurr)<->v103(VarCurr,bitIndex0)).
% 299.21/297.40  all VarCurr (v765(VarCurr)<->v85(VarCurr,bitIndex0)).
% 299.21/297.40  all VarCurr (v618(VarCurr)<->v620(VarCurr)).
% 299.21/297.40  all VarCurr (v620(VarCurr)<->v622(VarCurr)&v604(VarCurr)).
% 299.21/297.40  all VarCurr (v622(VarCurr)<->v624(VarCurr)).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v739(VarNext)-> (v624(VarNext)<->v624(VarCurr)))).
% 299.21/297.40  all VarNext (v739(VarNext)-> (v624(VarNext)<->v749(VarNext))).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v749(VarNext)<->v747(VarCurr))).
% 299.21/297.40  all VarCurr (-v750(VarCurr)-> (v747(VarCurr)<->x734(VarCurr))).
% 299.21/297.40  all VarCurr (v750(VarCurr)-> (v747(VarCurr)<->v634(VarCurr))).
% 299.21/297.40  all VarCurr (v750(VarCurr)<->v751(VarCurr)&v752(VarCurr)).
% 299.21/297.40  all VarCurr (-v752(VarCurr)<->v628(VarCurr)).
% 299.21/297.40  all VarCurr (-v751(VarCurr)<->v626(VarCurr)).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v739(VarNext)<->v740(VarNext))).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v740(VarNext)<->v741(VarNext)&v736(VarNext))).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v741(VarNext)<->v743(VarNext))).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v743(VarNext)<->v736(VarCurr))).
% 299.21/297.40  v624(constB0)<->$F.
% 299.21/297.40  all VarCurr (v736(VarCurr)<->v103(VarCurr,bitIndex0)).
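The v739 cluster is the file's recurring rising-edge template: v743 latches the previous value of the clock v736 (= v103[0]), v741 negates it, and v740 = v741 & v736 holds exactly on a 0 -> 1 step, at which point v624 samples v747 from the previous state; otherwise it holds its value. A sketch of one transition (the explicit prev/now arguments are an artifact of the sketch):

    def v624_next(v624, v736_prev, v736_now, v747_prev):
        rising = v736_now and not v736_prev     # v740 = v741 & v736
        return v747_prev if rising else v624    # sample on edge, else hold

    def v747(v626, v628, v634, x734):
        v750 = (not v626) and (not v628)        # v751 & v752
        return v634 if v750 else x734           # x734 is a free input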
% 299.21/297.40  all VarCurr (v634(VarCurr)<->v636(VarCurr)).
% 299.21/297.40  all VarCurr (v636(VarCurr)<->v638(VarCurr)).
% 299.21/297.40  all VarCurr (-v154(VarCurr)-> (v638(VarCurr)<->v646(VarCurr))).
% 299.21/297.40  all VarCurr (v154(VarCurr)-> (v638(VarCurr)<->v640(VarCurr))).
% 299.21/297.40  all VarCurr (-v214(VarCurr)-> (v646(VarCurr)<->v658(VarCurr))).
% 299.21/297.40  all VarCurr (v214(VarCurr)-> (v646(VarCurr)<->v648(VarCurr))).
% 299.21/297.40  all VarCurr (v658(VarCurr)<->v660(VarCurr)).
% 299.21/297.40  all VarCurr (v660(VarCurr)<->v662(VarCurr)).
% 299.21/297.40  all VarCurr (v662(VarCurr)<->v664(VarCurr)).
% 299.21/297.40  all VarCurr (v664(VarCurr)<->v666(VarCurr)).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v717(VarNext)-> (v666(VarNext)<->v666(VarCurr)))).
% 299.21/297.40  all VarNext (v717(VarNext)-> (v666(VarNext)<->v727(VarNext))).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v727(VarNext)<->v725(VarCurr))).
% 299.21/297.40  all VarCurr (-v728(VarCurr)-> (v725(VarCurr)<->v729(VarCurr))).
% 299.21/297.40  all VarCurr (v728(VarCurr)-> (v725(VarCurr)<->$F)).
% 299.21/297.40  all VarCurr (v729(VarCurr)<->v672(VarCurr)&v698(VarCurr)).
% 299.21/297.40  all VarCurr (-v728(VarCurr)<->v668(VarCurr)).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v717(VarNext)<->v718(VarNext))).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v718(VarNext)<->v719(VarNext)&v712(VarNext))).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v719(VarNext)<->v721(VarNext))).
% 299.21/297.40  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v721(VarNext)<->v712(VarCurr))).
% 299.21/297.40  v666(constB0)<->$F.
% 299.21/297.40  all VarCurr (v712(VarCurr)<->v714(VarCurr)).
% 299.21/297.40  all VarCurr (v714(VarCurr)<->v274(VarCurr)).
% 299.21/297.40  all VarCurr (v698(VarCurr)<->v700(VarCurr)).
% 299.21/297.40  all VarCurr (v700(VarCurr)<->v702(VarCurr)).
% 299.21/297.40  all VarCurr (v702(VarCurr)<->v704(VarCurr)).
% 299.21/297.40  all VarCurr (v704(VarCurr)<->v706(VarCurr)&v710(VarCurr)).
% 299.21/297.40  all VarCurr (-v710(VarCurr)<->v708(VarCurr)).
% 299.21/297.40  v708(constB0)<->$F.
% 299.21/297.40  v706(constB0)<->$F.
% 299.21/297.40  all VarCurr (v672(VarCurr)<->v674(VarCurr)).
% 299.21/297.41  all VarCurr (v674(VarCurr)<->v676(VarCurr)).
% 299.21/297.41  all VarCurr (v676(VarCurr)<->v678(VarCurr)).
% 299.21/297.41  all VarCurr (v678(VarCurr)<->v680(VarCurr)).
% 299.21/297.41  all VarCurr (v680(VarCurr)<->v682(VarCurr)).
% 299.21/297.41  all VarCurr (v682(VarCurr)<->v684(VarCurr)).
% 299.21/297.41  all VarCurr (v684(VarCurr)<->v686(VarCurr)).
% 299.21/297.41  all VarCurr (v686(VarCurr)<->v688(VarCurr)).
% 299.21/297.41  all VarCurr (v688(VarCurr)<->v690(VarCurr)).
% 299.21/297.41  all VarCurr (v690(VarCurr)<->v692(VarCurr)).
% 299.21/297.41  all VarCurr (v692(VarCurr)<->v694(VarCurr)).
% 299.21/297.41  all VarCurr (v694(VarCurr)<->v696(VarCurr)).
% 299.21/297.41  v696(constB0)<->$F.
% 299.21/297.41  all VarCurr (v668(VarCurr)<->v670(VarCurr)).
% 299.21/297.41  all VarCurr (v670(VarCurr)<->v228(VarCurr)).
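Since -v728 <-> v668, the guarded pair defining v725 collapses to a three-input AND of v668 (an alias chain down to v228), v672, and v698:

    def v725(v668, v672, v698):
        # v728 ? False : (v672 and v698), with v728 = not v668
        return v668 and v672 and v698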
% 299.21/297.41  all VarCurr (v648(VarCurr)<->v650(VarCurr)).
% 299.21/297.41  all VarCurr (v650(VarCurr)<->v652(VarCurr)).
% 299.21/297.41  all VarCurr (v652(VarCurr)<->v654(VarCurr)).
% 299.21/297.41  all VarCurr (-v656(VarCurr)& -v407(VarCurr)-> (v654(VarCurr)<->$F)).
% 299.21/297.41  all VarCurr (v407(VarCurr)-> (v654(VarCurr)<->v442(VarCurr))).
% 299.21/297.41  all VarCurr (v656(VarCurr)-> (v654(VarCurr)<->$F)).
% 299.21/297.41  all VarCurr (v656(VarCurr)<->v405(VarCurr)|v406(VarCurr)).
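The guards for v654 follow the file's convention of listing the default row first; if v656 = v405 | v406 and v407 can ever hold together, the axioms force v442 false there, so the safe reading is a priority chain with v656 on top:

    def v654(v405, v406, v407, v442):
        if v405 or v406:        # v656: force F
            return False
        if v407:
            return v442
        return False            # -v656 & -v407 default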
% 299.21/297.41  all VarCurr (v640(VarCurr)<->v642(VarCurr)).
% 299.21/297.41  all VarCurr (v642(VarCurr)<->v644(VarCurr)).
% 299.21/297.41  all VarCurr (v628(VarCurr)<->v184(VarCurr,bitIndex0)).
% 299.21/297.41  all VarCurr (v184(VarCurr,bitIndex0)<->v630(VarCurr)).
% 299.21/297.41  all VarCurr (v630(VarCurr)<->v632(VarCurr)).
% 299.21/297.41  all VarCurr (v632(VarCurr)<->v190(VarCurr)).
% 299.21/297.41  all VarCurr (v626(VarCurr)<->v85(VarCurr,bitIndex0)).
% 299.21/297.41  all VarCurr (v73(VarCurr)<->v75(VarCurr)).
% 299.21/297.41  all VarCurr (v75(VarCurr)<->v77(VarCurr)&v604(VarCurr)).
% 299.21/297.41  all VarCurr (-v604(VarCurr)<->v606(VarCurr)).
% 299.21/297.41  all VarCurr (v606(VarCurr)<->v608(VarCurr)).
% 299.21/297.41  all VarCurr (v608(VarCurr)<->v610(VarCurr)).
% 299.21/297.41  all VarCurr (v610(VarCurr)<->v612(VarCurr)).
% 299.21/297.41  all VarCurr (v612(VarCurr)<->v614(VarCurr)).
% 299.21/297.41  all VarCurr (v77(VarCurr)<->v79(VarCurr)).
% 299.21/297.41  all VarCurr (v79(VarCurr)<->v81(VarCurr)).
% 299.21/297.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v585(VarNext)-> (v81(VarNext)<->v81(VarCurr)))).
% 299.21/297.41  all VarNext (v585(VarNext)-> (v81(VarNext)<->v597(VarNext))).
% 299.21/297.41  all VarCurr (-v586(VarCurr)-> (v597(VarCurr)<->v598(VarCurr))).
% 299.21/297.41  all VarCurr (v586(VarCurr)-> (v597(VarCurr)<->v148(VarCurr))).
% 299.21/297.41  all VarCurr (-v591(VarCurr)-> (v598(VarCurr)<->v168(VarCurr))).
% 299.21/297.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v591(VarCurr)-> (v598(VarCurr)<->x166(VarNext)))).
% 299.21/297.41  all VarCurr (v585(VarCurr)<->v586(VarCurr)|v589(VarCurr)).
% 299.21/297.41  all VarCurr (v589(VarCurr)<->v590(VarCurr)&v596(VarCurr)).
% 299.21/297.41  all VarCurr (-v596(VarCurr)<->v586(VarCurr)).
% 299.21/297.41  all VarCurr (v590(VarCurr)<->v591(VarCurr)|v593(VarCurr)).
% 299.21/297.41  all VarCurr (v593(VarCurr)<->v594(VarCurr)&v595(VarCurr)).
% 299.21/297.41  all VarCurr (-v595(VarCurr)<->v591(VarCurr)).
% 299.21/297.41  all VarCurr (v594(VarCurr)<->v83(VarCurr)&v101(VarCurr)).
% 299.21/297.41  all VarCurr (v591(VarCurr)<->v83(VarCurr)&v592(VarCurr)).
% 299.21/297.41  all VarCurr (-v592(VarCurr)<->v101(VarCurr)).
% 299.21/297.41  all VarCurr (v586(VarCurr)<->v587(VarCurr)&v588(VarCurr)).
% 299.21/297.41  all VarCurr (-v588(VarCurr)<->v101(VarCurr)).
% 299.21/297.41  all VarCurr (-v587(VarCurr)<->v83(VarCurr)).
% 299.21/297.41  v81(constB0)<->$F.
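The v585 enable and the v597/v598 data muxes combine into a priority write port on v81, steered by the pair (v83, v101); note that in the v591 branch the sampled value is the free input x166 taken at the next state. Collapsing the guards gives this sketch:

    def v81_next(v81, v83, v101, v148, v168, x166):
        if not v83 and not v101:    # v586: reload from v148
            return v148
        if v83 and not v101:        # v591: free input x166
            return x166
        if v83 and v101:            # v594: data path v168
            return v168
        return v81                  # -v83 & v101: v585 false, hold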
% 299.21/297.41  all VarCurr (v168(VarCurr)<->v170(VarCurr,bitIndex29)).
% 299.21/297.41  all VarCurr (v170(VarCurr,bitIndex29)<->v172(VarCurr)).
% 299.21/297.41  all VarCurr (v172(VarCurr)<->v174(VarCurr)).
% 299.21/297.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v567(VarNext)-> (v174(VarNext)<->v174(VarCurr)))).
% 299.21/297.41  all VarNext (v567(VarNext)-> (v174(VarNext)<->v577(VarNext))).
% 299.21/297.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v577(VarNext)<->v575(VarCurr))).
% 299.21/297.41  all VarCurr (-v578(VarCurr)-> (v575(VarCurr)<->x552(VarCurr))).
% 299.21/297.41  all VarCurr (v578(VarCurr)-> (v575(VarCurr)<->v200(VarCurr))).
% 299.21/297.41  all VarCurr (v578(VarCurr)<->v579(VarCurr)&v580(VarCurr)).
% 299.21/297.41  all VarCurr (-v580(VarCurr)<->v182(VarCurr)).
% 299.21/297.41  all VarCurr (-v579(VarCurr)<->v176(VarCurr)).
% 299.21/297.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v567(VarNext)<->v568(VarNext))).
% 299.21/297.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v568(VarNext)<->v569(VarNext)&v554(VarNext))).
% 299.21/297.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v569(VarNext)<->v571(VarNext))).
% 299.21/297.41  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v571(VarNext)<->v554(VarCurr))).
% 299.21/297.41  v174(constB0)<->$F.
% 299.21/297.41  all VarCurr (v554(VarCurr)<->v103(VarCurr,bitIndex1)).
% 299.21/297.41  all VarCurr (v103(VarCurr,bitIndex1)<->v556(VarCurr)).
% 299.21/297.41  all VarCurr (v556(VarCurr)<->v564(VarCurr)|v562(VarCurr)).
% 299.21/297.42  all VarCurr (v564(VarCurr)<->v558(VarCurr)&v560(VarCurr)).
% 299.21/297.42  v560(constB0)<->$F.
% 299.21/297.42  all VarCurr (v562(VarCurr)<->v136(VarCurr)).
% 299.21/297.42  all VarCurr (v558(VarCurr)<->v15(VarCurr)).
% 299.21/297.42  all VarCurr (v200(VarCurr)<->v202(VarCurr,bitIndex30)).
% 299.21/297.42  all VarCurr (v202(VarCurr,bitIndex30)<->v204(VarCurr,bitIndex30)).
% 299.21/297.42  all VarCurr (v204(VarCurr,bitIndex30)<->v546(VarCurr,bitIndex30)).
% 299.21/297.42  all VarCurr (-v154(VarCurr)-> (all B (range_59_0(B)-> (v546(VarCurr,B)<->v550(VarCurr,B))))).
% 299.21/297.42  all VarCurr (v154(VarCurr)-> (all B (range_59_0(B)-> (v546(VarCurr,B)<->v547(VarCurr,B))))).
% 299.21/297.42  all B (range_59_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B|bitIndex56=B|bitIndex57=B|bitIndex58=B|bitIndex59=B).
% 299.21/297.42  all VarCurr B (range_51_0(B)-> (v550(VarCurr,B)<->v212(VarCurr,B))).
% 299.21/297.42  all B (range_51_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B).
% 299.21/297.42  all VarCurr ((v550(VarCurr,bitIndex59)<->$F)& (v550(VarCurr,bitIndex58)<->$F)& (v550(VarCurr,bitIndex57)<->$F)& (v550(VarCurr,bitIndex56)<->$F)& (v550(VarCurr,bitIndex55)<->$F)& (v550(VarCurr,bitIndex54)<->$F)& (v550(VarCurr,bitIndex53)<->$F)& (v550(VarCurr,bitIndex52)<->$F)).
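The range_51_0 copy plus the eight pinned upper bits make v550 a zero-extension of the 52-bit word v212 to 60 bits. Over integers:

    def v550(v212):
        # bits 51..0 from v212, bits 59..52 forced to F
        return v212 & ((1 << 52) - 1)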
% 299.21/297.42  -b00000000(bitIndex7).
% 299.21/297.42  -b00000000(bitIndex6).
% 299.21/297.42  -b00000000(bitIndex5).
% 299.21/297.42  -b00000000(bitIndex4).
% 299.21/297.42  -b00000000(bitIndex3).
% 299.21/297.42  -b00000000(bitIndex2).
% 299.21/297.42  -b00000000(bitIndex1).
% 299.21/297.42  -b00000000(bitIndex0).
% 299.21/297.42  all VarCurr B (range_55_0(B)-> (v547(VarCurr,B)<->v548(VarCurr,B))).
% 299.21/297.42  all B (range_55_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B).
% 299.21/297.42  all VarCurr ((v547(VarCurr,bitIndex59)<->v206(VarCurr,bitIndex3))& (v547(VarCurr,bitIndex58)<->v206(VarCurr,bitIndex2))& (v547(VarCurr,bitIndex57)<->v206(VarCurr,bitIndex1))& (v547(VarCurr,bitIndex56)<->v206(VarCurr,bitIndex0))).
% 299.21/297.42  all VarCurr B (range_7_0(B)-> (v548(VarCurr,B)<->v549(VarCurr,B))).
% 299.21/297.42  all VarCurr ((v548(VarCurr,bitIndex15)<->v549(VarCurr,bitIndex7))& (v548(VarCurr,bitIndex14)<->v549(VarCurr,bitIndex6))& (v548(VarCurr,bitIndex13)<->v549(VarCurr,bitIndex5))& (v548(VarCurr,bitIndex12)<->v549(VarCurr,bitIndex4))& (v548(VarCurr,bitIndex11)<->v549(VarCurr,bitIndex3))& (v548(VarCurr,bitIndex10)<->v549(VarCurr,bitIndex2))& (v548(VarCurr,bitIndex9)<->v549(VarCurr,bitIndex1))& (v548(VarCurr,bitIndex8)<->v549(VarCurr,bitIndex0))).
% 299.21/297.43  all VarCurr ((v548(VarCurr,bitIndex23)<->v549(VarCurr,bitIndex7))& (v548(VarCurr,bitIndex22)<->v549(VarCurr,bitIndex6))& (v548(VarCurr,bitIndex21)<->v549(VarCurr,bitIndex5))& (v548(VarCurr,bitIndex20)<->v549(VarCurr,bitIndex4))& (v548(VarCurr,bitIndex19)<->v549(VarCurr,bitIndex3))& (v548(VarCurr,bitIndex18)<->v549(VarCurr,bitIndex2))& (v548(VarCurr,bitIndex17)<->v549(VarCurr,bitIndex1))& (v548(VarCurr,bitIndex16)<->v549(VarCurr,bitIndex0))).
% 299.21/297.43  all VarCurr ((v548(VarCurr,bitIndex31)<->v549(VarCurr,bitIndex7))& (v548(VarCurr,bitIndex30)<->v549(VarCurr,bitIndex6))& (v548(VarCurr,bitIndex29)<->v549(VarCurr,bitIndex5))& (v548(VarCurr,bitIndex28)<->v549(VarCurr,bitIndex4))& (v548(VarCurr,bitIndex27)<->v549(VarCurr,bitIndex3))& (v548(VarCurr,bitIndex26)<->v549(VarCurr,bitIndex2))& (v548(VarCurr,bitIndex25)<->v549(VarCurr,bitIndex1))& (v548(VarCurr,bitIndex24)<->v549(VarCurr,bitIndex0))).
% 299.21/297.43  all VarCurr ((v548(VarCurr,bitIndex39)<->v549(VarCurr,bitIndex7))& (v548(VarCurr,bitIndex38)<->v549(VarCurr,bitIndex6))& (v548(VarCurr,bitIndex37)<->v549(VarCurr,bitIndex5))& (v548(VarCurr,bitIndex36)<->v549(VarCurr,bitIndex4))& (v548(VarCurr,bitIndex35)<->v549(VarCurr,bitIndex3))& (v548(VarCurr,bitIndex34)<->v549(VarCurr,bitIndex2))& (v548(VarCurr,bitIndex33)<->v549(VarCurr,bitIndex1))& (v548(VarCurr,bitIndex32)<->v549(VarCurr,bitIndex0))).
% 299.21/297.43  all VarCurr ((v548(VarCurr,bitIndex47)<->v549(VarCurr,bitIndex7))& (v548(VarCurr,bitIndex46)<->v549(VarCurr,bitIndex6))& (v548(VarCurr,bitIndex45)<->v549(VarCurr,bitIndex5))& (v548(VarCurr,bitIndex44)<->v549(VarCurr,bitIndex4))& (v548(VarCurr,bitIndex43)<->v549(VarCurr,bitIndex3))& (v548(VarCurr,bitIndex42)<->v549(VarCurr,bitIndex2))& (v548(VarCurr,bitIndex41)<->v549(VarCurr,bitIndex1))& (v548(VarCurr,bitIndex40)<->v549(VarCurr,bitIndex0))).
% 299.21/297.43  all VarCurr ((v548(VarCurr,bitIndex55)<->v549(VarCurr,bitIndex7))& (v548(VarCurr,bitIndex54)<->v549(VarCurr,bitIndex6))& (v548(VarCurr,bitIndex53)<->v549(VarCurr,bitIndex5))& (v548(VarCurr,bitIndex52)<->v549(VarCurr,bitIndex4))& (v548(VarCurr,bitIndex51)<->v549(VarCurr,bitIndex3))& (v548(VarCurr,bitIndex50)<->v549(VarCurr,bitIndex2))& (v548(VarCurr,bitIndex49)<->v549(VarCurr,bitIndex1))& (v548(VarCurr,bitIndex48)<->v549(VarCurr,bitIndex0))).
% 299.21/297.43  all VarCurr B (range_7_0(B)-> (v549(VarCurr,B)<->v206(VarCurr,B))).
% 299.21/297.43  all B (range_7_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B).
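The base range_7_0 copy and the six byte-copy conjunctions spell out a sevenfold replication of the byte v549 = v206[7:0] across v548[55:0]; the earlier v547 axiom then stacks v206[3:0] on top as bits 59..56. A sketch over integers:

    def v548(v206):
        byte = v206 & 0xFF          # v549 = v206[7:0]
        out = 0
        for k in range(7):          # copy into bits 8k+7 .. 8k
            out |= byte << (8 * k)
        return out

    def v547(v206):
        return ((v206 & 0xF) << 56) | v548(v206)   # v206[3:0] on top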
% 299.21/297.43  all VarCurr (v212(VarCurr,bitIndex30)<->v545(VarCurr,bitIndex30)).
% 299.21/297.43  all VarCurr (-v214(VarCurr)-> (all B (range_47_0(B)-> (v545(VarCurr,B)<->v535(VarCurr,B))))).
% 299.21/297.43  all VarCurr (v214(VarCurr)-> (all B (range_47_0(B)-> (v545(VarCurr,B)<->v294(VarCurr,B))))).
% 299.21/297.43  all VarCurr (v535(VarCurr,bitIndex30)<->v537(VarCurr,bitIndex30)).
% 299.21/297.43  all VarCurr (v537(VarCurr,bitIndex30)<->v539(VarCurr,bitIndex30)).
% 299.21/297.43  all VarCurr (v539(VarCurr,bitIndex30)<->v541(VarCurr,bitIndex30)).
% 299.21/297.43  all VarCurr (v541(VarCurr,bitIndex30)<->v543(VarCurr,bitIndex37)).
% 299.21/297.43  all B (range_63_0(B)-> (v543(constB0,B)<->$F)).
% 299.21/297.43  all B (range_63_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B|bitIndex56=B|bitIndex57=B|bitIndex58=B|bitIndex59=B|bitIndex60=B|bitIndex61=B|bitIndex62=B|bitIndex63=B).
% 299.21/297.43  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex63).
% 299.21/297.43  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex62).
% 299.21/297.43  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex61).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex60).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex59).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex58).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex57).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex56).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex55).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex54).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex53).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex52).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex51).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex50).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex49).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex48).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex47).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex46).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex45).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex44).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex43).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex42).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex41).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex40).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex39).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex38).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex37).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex36).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex35).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex34).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex33).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex32).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex31).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex30).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex29).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex28).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex27).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex26).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex25).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex24).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex23).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex22).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex21).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex20).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex19).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex18).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex17).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex16).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex15).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex14).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex13).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex12).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex11).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex10).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex9).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex8).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex7).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex6).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex5).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex4).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex3).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex2).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex1).
% 299.21/297.44  -b0000000000000000000000000000000000000000000000000000000000000000(bitIndex0).
% 299.21/297.44  all VarCurr (v294(VarCurr,bitIndex30)<->v296(VarCurr,bitIndex30)).
% 299.21/297.44  all VarCurr (v296(VarCurr,bitIndex30)<->v298(VarCurr,bitIndex30)).
% 299.21/297.44  all VarCurr (v298(VarCurr,bitIndex30)<->v300(VarCurr,bitIndex30)).
% 299.21/297.44  all VarCurr (v300(VarCurr,bitIndex30)<->v523(VarCurr,bitIndex30)).
% 299.21/297.44  all VarCurr (-v525(VarCurr)& -v527(VarCurr)& -v528(VarCurr)& -v529(VarCurr)& -v530(VarCurr)& -v531(VarCurr)& -v532(VarCurr)-> (all B (range_47_0(B)-> (v523(VarCurr,B)<->v302(VarCurr,B))))).
% 299.21/297.44  all VarCurr (v532(VarCurr)-> (v523(VarCurr,bitIndex47)<->v302(VarCurr,bitIndex95))& (v523(VarCurr,bitIndex46)<->v302(VarCurr,bitIndex94))& (v523(VarCurr,bitIndex45)<->v302(VarCurr,bitIndex93))& (v523(VarCurr,bitIndex44)<->v302(VarCurr,bitIndex92))& (v523(VarCurr,bitIndex43)<->v302(VarCurr,bitIndex91))& (v523(VarCurr,bitIndex42)<->v302(VarCurr,bitIndex90))& (v523(VarCurr,bitIndex41)<->v302(VarCurr,bitIndex89))& (v523(VarCurr,bitIndex40)<->v302(VarCurr,bitIndex88))& (v523(VarCurr,bitIndex39)<->v302(VarCurr,bitIndex87))& (v523(VarCurr,bitIndex38)<->v302(VarCurr,bitIndex86))& (v523(VarCurr,bitIndex37)<->v302(VarCurr,bitIndex85))& (v523(VarCurr,bitIndex36)<->v302(VarCurr,bitIndex84))& (v523(VarCurr,bitIndex35)<->v302(VarCurr,bitIndex83))& (v523(VarCurr,bitIndex34)<->v302(VarCurr,bitIndex82))& (v523(VarCurr,bitIndex33)<->v302(VarCurr,bitIndex81))& (v523(VarCurr,bitIndex32)<->v302(VarCurr,bitIndex80))& (v523(VarCurr,bitIndex31)<->v302(VarCurr,bitIndex79))& (v523(VarCurr,bitIndex30)<->v302(VarCurr,bitIndex78))& (v523(VarCurr,bitIndex29)<->v302(VarCurr,bitIndex77))& (v523(VarCurr,bitIndex28)<->v302(VarCurr,bitIndex76))& (v523(VarCurr,bitIndex27)<->v302(VarCurr,bitIndex75))& (v523(VarCurr,bitIndex26)<->v302(VarCurr,bitIndex74))& (v523(VarCurr,bitIndex25)<->v302(VarCurr,bitIndex73))& (v523(VarCurr,bitIndex24)<->v302(VarCurr,bitIndex72))& (v523(VarCurr,bitIndex23)<->v302(VarCurr,bitIndex71))& (v523(VarCurr,bitIndex22)<->v302(VarCurr,bitIndex70))& (v523(VarCurr,bitIndex21)<->v302(VarCurr,bitIndex69))& (v523(VarCurr,bitIndex20)<->v302(VarCurr,bitIndex68))& (v523(VarCurr,bitIndex19)<->v302(VarCurr,bitIndex67))& (v523(VarCurr,bitIndex18)<->v302(VarCurr,bitIndex66))& (v523(VarCurr,bitIndex17)<->v302(VarCurr,bitIndex65))& (v523(VarCurr,bitIndex16)<->v302(VarCurr,bitIndex64))& (v523(VarCurr,bitIndex15)<->v302(VarCurr,bitIndex63))& (v523(VarCurr,bitIndex14)<->v302(VarCurr,bitIndex62))& (v523(VarCurr,bitIndex13)<->v302(VarCurr,bitIndex61))& (v523(VarCurr,bitIndex12)<->v302(VarCurr,bitIndex60))& (v523(VarCurr,bitIndex11)<->v302(VarCurr,bitIndex59))& (v523(VarCurr,bitIndex10)<->v302(VarCurr,bitIndex58))& (v523(VarCurr,bitIndex9)<->v302(VarCurr,bitIndex57))& (v523(VarCurr,bitIndex8)<->v302(VarCurr,bitIndex56))& (v523(VarCurr,bitIndex7)<->v302(VarCurr,bitIndex55))& (v523(VarCurr,bitIndex6)<->v302(VarCurr,bitIndex54))& (v523(VarCurr,bitIndex5)<->v302(VarCurr,bitIndex53))& (v523(VarCurr,bitIndex4)<->v302(VarCurr,bitIndex52))& (v523(VarCurr,bitIndex3)<->v302(VarCurr,bitIndex51))& (v523(VarCurr,bitIndex2)<->v302(VarCurr,bitIndex50))& (v523(VarCurr,bitIndex1)<->v302(VarCurr,bitIndex49))& (v523(VarCurr,bitIndex0)<->v302(VarCurr,bitIndex48))).
% 299.21/297.44  all VarCurr (v531(VarCurr)-> (v523(VarCurr,bitIndex47)<->v302(VarCurr,bitIndex143))& (v523(VarCurr,bitIndex46)<->v302(VarCurr,bitIndex142))& (v523(VarCurr,bitIndex45)<->v302(VarCurr,bitIndex141))& (v523(VarCurr,bitIndex44)<->v302(VarCurr,bitIndex140))& (v523(VarCurr,bitIndex43)<->v302(VarCurr,bitIndex139))& (v523(VarCurr,bitIndex42)<->v302(VarCurr,bitIndex138))& (v523(VarCurr,bitIndex41)<->v302(VarCurr,bitIndex137))& (v523(VarCurr,bitIndex40)<->v302(VarCurr,bitIndex136))& (v523(VarCurr,bitIndex39)<->v302(VarCurr,bitIndex135))& (v523(VarCurr,bitIndex38)<->v302(VarCurr,bitIndex134))& (v523(VarCurr,bitIndex37)<->v302(VarCurr,bitIndex133))& (v523(VarCurr,bitIndex36)<->v302(VarCurr,bitIndex132))& (v523(VarCurr,bitIndex35)<->v302(VarCurr,bitIndex131))& (v523(VarCurr,bitIndex34)<->v302(VarCurr,bitIndex130))& (v523(VarCurr,bitIndex33)<->v302(VarCurr,bitIndex129))& (v523(VarCurr,bitIndex32)<->v302(VarCurr,bitIndex128))& (v523(VarCurr,bitIndex31)<->v302(VarCurr,bitIndex127))& (v523(VarCurr,bitIndex30)<->v302(VarCurr,bitIndex126))& (v523(VarCurr,bitIndex29)<->v302(VarCurr,bitIndex125))& (v523(VarCurr,bitIndex28)<->v302(VarCurr,bitIndex124))& (v523(VarCurr,bitIndex27)<->v302(VarCurr,bitIndex123))& (v523(VarCurr,bitIndex26)<->v302(VarCurr,bitIndex122))& (v523(VarCurr,bitIndex25)<->v302(VarCurr,bitIndex121))& (v523(VarCurr,bitIndex24)<->v302(VarCurr,bitIndex120))& (v523(VarCurr,bitIndex23)<->v302(VarCurr,bitIndex119))& (v523(VarCurr,bitIndex22)<->v302(VarCurr,bitIndex118))& (v523(VarCurr,bitIndex21)<->v302(VarCurr,bitIndex117))& (v523(VarCurr,bitIndex20)<->v302(VarCurr,bitIndex116))& (v523(VarCurr,bitIndex19)<->v302(VarCurr,bitIndex115))& (v523(VarCurr,bitIndex18)<->v302(VarCurr,bitIndex114))& (v523(VarCurr,bitIndex17)<->v302(VarCurr,bitIndex113))& (v523(VarCurr,bitIndex16)<->v302(VarCurr,bitIndex112))& (v523(VarCurr,bitIndex15)<->v302(VarCurr,bitIndex111))& (v523(VarCurr,bitIndex14)<->v302(VarCurr,bitIndex110))& (v523(VarCurr,bitIndex13)<->v302(VarCurr,bitIndex109))& (v523(VarCurr,bitIndex12)<->v302(VarCurr,bitIndex108))& (v523(VarCurr,bitIndex11)<->v302(VarCurr,bitIndex107))& (v523(VarCurr,bitIndex10)<->v302(VarCurr,bitIndex106))& (v523(VarCurr,bitIndex9)<->v302(VarCurr,bitIndex105))& (v523(VarCurr,bitIndex8)<->v302(VarCurr,bitIndex104))& (v523(VarCurr,bitIndex7)<->v302(VarCurr,bitIndex103))& (v523(VarCurr,bitIndex6)<->v302(VarCurr,bitIndex102))& (v523(VarCurr,bitIndex5)<->v302(VarCurr,bitIndex101))& (v523(VarCurr,bitIndex4)<->v302(VarCurr,bitIndex100))& (v523(VarCurr,bitIndex3)<->v302(VarCurr,bitIndex99))& (v523(VarCurr,bitIndex2)<->v302(VarCurr,bitIndex98))& (v523(VarCurr,bitIndex1)<->v302(VarCurr,bitIndex97))& (v523(VarCurr,bitIndex0)<->v302(VarCurr,bitIndex96))).
% 299.21/297.45  all VarCurr (v530(VarCurr)-> (v523(VarCurr,bitIndex47)<->v302(VarCurr,bitIndex191))& (v523(VarCurr,bitIndex46)<->v302(VarCurr,bitIndex190))& (v523(VarCurr,bitIndex45)<->v302(VarCurr,bitIndex189))& (v523(VarCurr,bitIndex44)<->v302(VarCurr,bitIndex188))& (v523(VarCurr,bitIndex43)<->v302(VarCurr,bitIndex187))& (v523(VarCurr,bitIndex42)<->v302(VarCurr,bitIndex186))& (v523(VarCurr,bitIndex41)<->v302(VarCurr,bitIndex185))& (v523(VarCurr,bitIndex40)<->v302(VarCurr,bitIndex184))& (v523(VarCurr,bitIndex39)<->v302(VarCurr,bitIndex183))& (v523(VarCurr,bitIndex38)<->v302(VarCurr,bitIndex182))& (v523(VarCurr,bitIndex37)<->v302(VarCurr,bitIndex181))& (v523(VarCurr,bitIndex36)<->v302(VarCurr,bitIndex180))& (v523(VarCurr,bitIndex35)<->v302(VarCurr,bitIndex179))& (v523(VarCurr,bitIndex34)<->v302(VarCurr,bitIndex178))& (v523(VarCurr,bitIndex33)<->v302(VarCurr,bitIndex177))& (v523(VarCurr,bitIndex32)<->v302(VarCurr,bitIndex176))& (v523(VarCurr,bitIndex31)<->v302(VarCurr,bitIndex175))& (v523(VarCurr,bitIndex30)<->v302(VarCurr,bitIndex174))& (v523(VarCurr,bitIndex29)<->v302(VarCurr,bitIndex173))& (v523(VarCurr,bitIndex28)<->v302(VarCurr,bitIndex172))& (v523(VarCurr,bitIndex27)<->v302(VarCurr,bitIndex171))& (v523(VarCurr,bitIndex26)<->v302(VarCurr,bitIndex170))& (v523(VarCurr,bitIndex25)<->v302(VarCurr,bitIndex169))& (v523(VarCurr,bitIndex24)<->v302(VarCurr,bitIndex168))& (v523(VarCurr,bitIndex23)<->v302(VarCurr,bitIndex167))& (v523(VarCurr,bitIndex22)<->v302(VarCurr,bitIndex166))& (v523(VarCurr,bitIndex21)<->v302(VarCurr,bitIndex165))& (v523(VarCurr,bitIndex20)<->v302(VarCurr,bitIndex164))& (v523(VarCurr,bitIndex19)<->v302(VarCurr,bitIndex163))& (v523(VarCurr,bitIndex18)<->v302(VarCurr,bitIndex162))& (v523(VarCurr,bitIndex17)<->v302(VarCurr,bitIndex161))& (v523(VarCurr,bitIndex16)<->v302(VarCurr,bitIndex160))& (v523(VarCurr,bitIndex15)<->v302(VarCurr,bitIndex159))& (v523(VarCurr,bitIndex14)<->v302(VarCurr,bitIndex158))& (v523(VarCurr,bitIndex13)<->v302(VarCurr,bitIndex157))& (v523(VarCurr,bitIndex12)<->v302(VarCurr,bitIndex156))& (v523(VarCurr,bitIndex11)<->v302(VarCurr,bitIndex155))& (v523(VarCurr,bitIndex10)<->v302(VarCurr,bitIndex154))& (v523(VarCurr,bitIndex9)<->v302(VarCurr,bitIndex153))& (v523(VarCurr,bitIndex8)<->v302(VarCurr,bitIndex152))& (v523(VarCurr,bitIndex7)<->v302(VarCurr,bitIndex151))& (v523(VarCurr,bitIndex6)<->v302(VarCurr,bitIndex150))& (v523(VarCurr,bitIndex5)<->v302(VarCurr,bitIndex149))& (v523(VarCurr,bitIndex4)<->v302(VarCurr,bitIndex148))& (v523(VarCurr,bitIndex3)<->v302(VarCurr,bitIndex147))& (v523(VarCurr,bitIndex2)<->v302(VarCurr,bitIndex146))& (v523(VarCurr,bitIndex1)<->v302(VarCurr,bitIndex145))& (v523(VarCurr,bitIndex0)<->v302(VarCurr,bitIndex144))).
% 299.21/297.45  all VarCurr (v529(VarCurr)-> (v523(VarCurr,bitIndex47)<->v302(VarCurr,bitIndex239))& (v523(VarCurr,bitIndex46)<->v302(VarCurr,bitIndex238))& (v523(VarCurr,bitIndex45)<->v302(VarCurr,bitIndex237))& (v523(VarCurr,bitIndex44)<->v302(VarCurr,bitIndex236))& (v523(VarCurr,bitIndex43)<->v302(VarCurr,bitIndex235))& (v523(VarCurr,bitIndex42)<->v302(VarCurr,bitIndex234))& (v523(VarCurr,bitIndex41)<->v302(VarCurr,bitIndex233))& (v523(VarCurr,bitIndex40)<->v302(VarCurr,bitIndex232))& (v523(VarCurr,bitIndex39)<->v302(VarCurr,bitIndex231))& (v523(VarCurr,bitIndex38)<->v302(VarCurr,bitIndex230))& (v523(VarCurr,bitIndex37)<->v302(VarCurr,bitIndex229))& (v523(VarCurr,bitIndex36)<->v302(VarCurr,bitIndex228))& (v523(VarCurr,bitIndex35)<->v302(VarCurr,bitIndex227))& (v523(VarCurr,bitIndex34)<->v302(VarCurr,bitIndex226))& (v523(VarCurr,bitIndex33)<->v302(VarCurr,bitIndex225))& (v523(VarCurr,bitIndex32)<->v302(VarCurr,bitIndex224))& (v523(VarCurr,bitIndex31)<->v302(VarCurr,bitIndex223))& (v523(VarCurr,bitIndex30)<->v302(VarCurr,bitIndex222))& (v523(VarCurr,bitIndex29)<->v302(VarCurr,bitIndex221))& (v523(VarCurr,bitIndex28)<->v302(VarCurr,bitIndex220))& (v523(VarCurr,bitIndex27)<->v302(VarCurr,bitIndex219))& (v523(VarCurr,bitIndex26)<->v302(VarCurr,bitIndex218))& (v523(VarCurr,bitIndex25)<->v302(VarCurr,bitIndex217))& (v523(VarCurr,bitIndex24)<->v302(VarCurr,bitIndex216))& (v523(VarCurr,bitIndex23)<->v302(VarCurr,bitIndex215))& (v523(VarCurr,bitIndex22)<->v302(VarCurr,bitIndex214))& (v523(VarCurr,bitIndex21)<->v302(VarCurr,bitIndex213))& (v523(VarCurr,bitIndex20)<->v302(VarCurr,bitIndex212))& (v523(VarCurr,bitIndex19)<->v302(VarCurr,bitIndex211))& (v523(VarCurr,bitIndex18)<->v302(VarCurr,bitIndex210))& (v523(VarCurr,bitIndex17)<->v302(VarCurr,bitIndex209))& (v523(VarCurr,bitIndex16)<->v302(VarCurr,bitIndex208))& (v523(VarCurr,bitIndex15)<->v302(VarCurr,bitIndex207))& (v523(VarCurr,bitIndex14)<->v302(VarCurr,bitIndex206))& (v523(VarCurr,bitIndex13)<->v302(VarCurr,bitIndex205))& (v523(VarCurr,bitIndex12)<->v302(VarCurr,bitIndex204))& (v523(VarCurr,bitIndex11)<->v302(VarCurr,bitIndex203))& (v523(VarCurr,bitIndex10)<->v302(VarCurr,bitIndex202))& (v523(VarCurr,bitIndex9)<->v302(VarCurr,bitIndex201))& (v523(VarCurr,bitIndex8)<->v302(VarCurr,bitIndex200))& (v523(VarCurr,bitIndex7)<->v302(VarCurr,bitIndex199))& (v523(VarCurr,bitIndex6)<->v302(VarCurr,bitIndex198))& (v523(VarCurr,bitIndex5)<->v302(VarCurr,bitIndex197))& (v523(VarCurr,bitIndex4)<->v302(VarCurr,bitIndex196))& (v523(VarCurr,bitIndex3)<->v302(VarCurr,bitIndex195))& (v523(VarCurr,bitIndex2)<->v302(VarCurr,bitIndex194))& (v523(VarCurr,bitIndex1)<->v302(VarCurr,bitIndex193))& (v523(VarCurr,bitIndex0)<->v302(VarCurr,bitIndex192))).
% 299.21/297.45  all VarCurr (v528(VarCurr)-> (v523(VarCurr,bitIndex47)<->v302(VarCurr,bitIndex287))& (v523(VarCurr,bitIndex46)<->v302(VarCurr,bitIndex286))& (v523(VarCurr,bitIndex45)<->v302(VarCurr,bitIndex285))& (v523(VarCurr,bitIndex44)<->v302(VarCurr,bitIndex284))& (v523(VarCurr,bitIndex43)<->v302(VarCurr,bitIndex283))& (v523(VarCurr,bitIndex42)<->v302(VarCurr,bitIndex282))& (v523(VarCurr,bitIndex41)<->v302(VarCurr,bitIndex281))& (v523(VarCurr,bitIndex40)<->v302(VarCurr,bitIndex280))& (v523(VarCurr,bitIndex39)<->v302(VarCurr,bitIndex279))& (v523(VarCurr,bitIndex38)<->v302(VarCurr,bitIndex278))& (v523(VarCurr,bitIndex37)<->v302(VarCurr,bitIndex277))& (v523(VarCurr,bitIndex36)<->v302(VarCurr,bitIndex276))& (v523(VarCurr,bitIndex35)<->v302(VarCurr,bitIndex275))& (v523(VarCurr,bitIndex34)<->v302(VarCurr,bitIndex274))& (v523(VarCurr,bitIndex33)<->v302(VarCurr,bitIndex273))& (v523(VarCurr,bitIndex32)<->v302(VarCurr,bitIndex272))& (v523(VarCurr,bitIndex31)<->v302(VarCurr,bitIndex271))& (v523(VarCurr,bitIndex30)<->v302(VarCurr,bitIndex270))& (v523(VarCurr,bitIndex29)<->v302(VarCurr,bitIndex269))& (v523(VarCurr,bitIndex28)<->v302(VarCurr,bitIndex268))& (v523(VarCurr,bitIndex27)<->v302(VarCurr,bitIndex267))& (v523(VarCurr,bitIndex26)<->v302(VarCurr,bitIndex266))& (v523(VarCurr,bitIndex25)<->v302(VarCurr,bitIndex265))& (v523(VarCurr,bitIndex24)<->v302(VarCurr,bitIndex264))& (v523(VarCurr,bitIndex23)<->v302(VarCurr,bitIndex263))& (v523(VarCurr,bitIndex22)<->v302(VarCurr,bitIndex262))& (v523(VarCurr,bitIndex21)<->v302(VarCurr,bitIndex261))& (v523(VarCurr,bitIndex20)<->v302(VarCurr,bitIndex260))& (v523(VarCurr,bitIndex19)<->v302(VarCurr,bitIndex259))& (v523(VarCurr,bitIndex18)<->v302(VarCurr,bitIndex258))& (v523(VarCurr,bitIndex17)<->v302(VarCurr,bitIndex257))& (v523(VarCurr,bitIndex16)<->v302(VarCurr,bitIndex256))& (v523(VarCurr,bitIndex15)<->v302(VarCurr,bitIndex255))& (v523(VarCurr,bitIndex14)<->v302(VarCurr,bitIndex254))& (v523(VarCurr,bitIndex13)<->v302(VarCurr,bitIndex253))& (v523(VarCurr,bitIndex12)<->v302(VarCurr,bitIndex252))& (v523(VarCurr,bitIndex11)<->v302(VarCurr,bitIndex251))& (v523(VarCurr,bitIndex10)<->v302(VarCurr,bitIndex250))& (v523(VarCurr,bitIndex9)<->v302(VarCurr,bitIndex249))& (v523(VarCurr,bitIndex8)<->v302(VarCurr,bitIndex248))& (v523(VarCurr,bitIndex7)<->v302(VarCurr,bitIndex247))& (v523(VarCurr,bitIndex6)<->v302(VarCurr,bitIndex246))& (v523(VarCurr,bitIndex5)<->v302(VarCurr,bitIndex245))& (v523(VarCurr,bitIndex4)<->v302(VarCurr,bitIndex244))& (v523(VarCurr,bitIndex3)<->v302(VarCurr,bitIndex243))& (v523(VarCurr,bitIndex2)<->v302(VarCurr,bitIndex242))& (v523(VarCurr,bitIndex1)<->v302(VarCurr,bitIndex241))& (v523(VarCurr,bitIndex0)<->v302(VarCurr,bitIndex240))).
% 299.29/297.46  all VarCurr (v527(VarCurr)-> (v523(VarCurr,bitIndex47)<->v302(VarCurr,bitIndex335))& (v523(VarCurr,bitIndex46)<->v302(VarCurr,bitIndex334))& (v523(VarCurr,bitIndex45)<->v302(VarCurr,bitIndex333))& (v523(VarCurr,bitIndex44)<->v302(VarCurr,bitIndex332))& (v523(VarCurr,bitIndex43)<->v302(VarCurr,bitIndex331))& (v523(VarCurr,bitIndex42)<->v302(VarCurr,bitIndex330))& (v523(VarCurr,bitIndex41)<->v302(VarCurr,bitIndex329))& (v523(VarCurr,bitIndex40)<->v302(VarCurr,bitIndex328))& (v523(VarCurr,bitIndex39)<->v302(VarCurr,bitIndex327))& (v523(VarCurr,bitIndex38)<->v302(VarCurr,bitIndex326))& (v523(VarCurr,bitIndex37)<->v302(VarCurr,bitIndex325))& (v523(VarCurr,bitIndex36)<->v302(VarCurr,bitIndex324))& (v523(VarCurr,bitIndex35)<->v302(VarCurr,bitIndex323))& (v523(VarCurr,bitIndex34)<->v302(VarCurr,bitIndex322))& (v523(VarCurr,bitIndex33)<->v302(VarCurr,bitIndex321))& (v523(VarCurr,bitIndex32)<->v302(VarCurr,bitIndex320))& (v523(VarCurr,bitIndex31)<->v302(VarCurr,bitIndex319))& (v523(VarCurr,bitIndex30)<->v302(VarCurr,bitIndex318))& (v523(VarCurr,bitIndex29)<->v302(VarCurr,bitIndex317))& (v523(VarCurr,bitIndex28)<->v302(VarCurr,bitIndex316))& (v523(VarCurr,bitIndex27)<->v302(VarCurr,bitIndex315))& (v523(VarCurr,bitIndex26)<->v302(VarCurr,bitIndex314))& (v523(VarCurr,bitIndex25)<->v302(VarCurr,bitIndex313))& (v523(VarCurr,bitIndex24)<->v302(VarCurr,bitIndex312))& (v523(VarCurr,bitIndex23)<->v302(VarCurr,bitIndex311))& (v523(VarCurr,bitIndex22)<->v302(VarCurr,bitIndex310))& (v523(VarCurr,bitIndex21)<->v302(VarCurr,bitIndex309))& (v523(VarCurr,bitIndex20)<->v302(VarCurr,bitIndex308))& (v523(VarCurr,bitIndex19)<->v302(VarCurr,bitIndex307))& (v523(VarCurr,bitIndex18)<->v302(VarCurr,bitIndex306))& (v523(VarCurr,bitIndex17)<->v302(VarCurr,bitIndex305))& (v523(VarCurr,bitIndex16)<->v302(VarCurr,bitIndex304))& (v523(VarCurr,bitIndex15)<->v302(VarCurr,bitIndex303))& (v523(VarCurr,bitIndex14)<->v302(VarCurr,bitIndex302))& (v523(VarCurr,bitIndex13)<->v302(VarCurr,bitIndex301))& (v523(VarCurr,bitIndex12)<->v302(VarCurr,bitIndex300))& (v523(VarCurr,bitIndex11)<->v302(VarCurr,bitIndex299))& (v523(VarCurr,bitIndex10)<->v302(VarCurr,bitIndex298))& (v523(VarCurr,bitIndex9)<->v302(VarCurr,bitIndex297))& (v523(VarCurr,bitIndex8)<->v302(VarCurr,bitIndex296))& (v523(VarCurr,bitIndex7)<->v302(VarCurr,bitIndex295))& (v523(VarCurr,bitIndex6)<->v302(VarCurr,bitIndex294))& (v523(VarCurr,bitIndex5)<->v302(VarCurr,bitIndex293))& (v523(VarCurr,bitIndex4)<->v302(VarCurr,bitIndex292))& (v523(VarCurr,bitIndex3)<->v302(VarCurr,bitIndex291))& (v523(VarCurr,bitIndex2)<->v302(VarCurr,bitIndex290))& (v523(VarCurr,bitIndex1)<->v302(VarCurr,bitIndex289))& (v523(VarCurr,bitIndex0)<->v302(VarCurr,bitIndex288))).
% 299.29/297.47  all VarCurr (v525(VarCurr)-> (v523(VarCurr,bitIndex47)<->v302(VarCurr,bitIndex383))& (v523(VarCurr,bitIndex46)<->v302(VarCurr,bitIndex382))& (v523(VarCurr,bitIndex45)<->v302(VarCurr,bitIndex381))& (v523(VarCurr,bitIndex44)<->v302(VarCurr,bitIndex380))& (v523(VarCurr,bitIndex43)<->v302(VarCurr,bitIndex379))& (v523(VarCurr,bitIndex42)<->v302(VarCurr,bitIndex378))& (v523(VarCurr,bitIndex41)<->v302(VarCurr,bitIndex377))& (v523(VarCurr,bitIndex40)<->v302(VarCurr,bitIndex376))& (v523(VarCurr,bitIndex39)<->v302(VarCurr,bitIndex375))& (v523(VarCurr,bitIndex38)<->v302(VarCurr,bitIndex374))& (v523(VarCurr,bitIndex37)<->v302(VarCurr,bitIndex373))& (v523(VarCurr,bitIndex36)<->v302(VarCurr,bitIndex372))& (v523(VarCurr,bitIndex35)<->v302(VarCurr,bitIndex371))& (v523(VarCurr,bitIndex34)<->v302(VarCurr,bitIndex370))& (v523(VarCurr,bitIndex33)<->v302(VarCurr,bitIndex369))& (v523(VarCurr,bitIndex32)<->v302(VarCurr,bitIndex368))& (v523(VarCurr,bitIndex31)<->v302(VarCurr,bitIndex367))& (v523(VarCurr,bitIndex30)<->v302(VarCurr,bitIndex366))& (v523(VarCurr,bitIndex29)<->v302(VarCurr,bitIndex365))& (v523(VarCurr,bitIndex28)<->v302(VarCurr,bitIndex364))& (v523(VarCurr,bitIndex27)<->v302(VarCurr,bitIndex363))& (v523(VarCurr,bitIndex26)<->v302(VarCurr,bitIndex362))& (v523(VarCurr,bitIndex25)<->v302(VarCurr,bitIndex361))& (v523(VarCurr,bitIndex24)<->v302(VarCurr,bitIndex360))& (v523(VarCurr,bitIndex23)<->v302(VarCurr,bitIndex359))& (v523(VarCurr,bitIndex22)<->v302(VarCurr,bitIndex358))& (v523(VarCurr,bitIndex21)<->v302(VarCurr,bitIndex357))& (v523(VarCurr,bitIndex20)<->v302(VarCurr,bitIndex356))& (v523(VarCurr,bitIndex19)<->v302(VarCurr,bitIndex355))& (v523(VarCurr,bitIndex18)<->v302(VarCurr,bitIndex354))& (v523(VarCurr,bitIndex17)<->v302(VarCurr,bitIndex353))& (v523(VarCurr,bitIndex16)<->v302(VarCurr,bitIndex352))& (v523(VarCurr,bitIndex15)<->v302(VarCurr,bitIndex351))& (v523(VarCurr,bitIndex14)<->v302(VarCurr,bitIndex350))& (v523(VarCurr,bitIndex13)<->v302(VarCurr,bitIndex349))& (v523(VarCurr,bitIndex12)<->v302(VarCurr,bitIndex348))& (v523(VarCurr,bitIndex11)<->v302(VarCurr,bitIndex347))& (v523(VarCurr,bitIndex10)<->v302(VarCurr,bitIndex346))& (v523(VarCurr,bitIndex9)<->v302(VarCurr,bitIndex345))& (v523(VarCurr,bitIndex8)<->v302(VarCurr,bitIndex344))& (v523(VarCurr,bitIndex7)<->v302(VarCurr,bitIndex343))& (v523(VarCurr,bitIndex6)<->v302(VarCurr,bitIndex342))& (v523(VarCurr,bitIndex5)<->v302(VarCurr,bitIndex341))& (v523(VarCurr,bitIndex4)<->v302(VarCurr,bitIndex340))& (v523(VarCurr,bitIndex3)<->v302(VarCurr,bitIndex339))& (v523(VarCurr,bitIndex2)<->v302(VarCurr,bitIndex338))& (v523(VarCurr,bitIndex1)<->v302(VarCurr,bitIndex337))& (v523(VarCurr,bitIndex0)<->v302(VarCurr,bitIndex336))).
% 299.29/297.47  all VarCurr (v533(VarCurr)<-> (v304(VarCurr,bitIndex2)<->$T)& (v304(VarCurr,bitIndex1)<->$T)& (v304(VarCurr,bitIndex0)<->$T)).
% 299.29/297.47  all VarCurr (v532(VarCurr)<-> (v304(VarCurr,bitIndex2)<->$T)& (v304(VarCurr,bitIndex1)<->$T)& (v304(VarCurr,bitIndex0)<->$F)).
% 299.29/297.47  all VarCurr (v531(VarCurr)<-> (v304(VarCurr,bitIndex2)<->$T)& (v304(VarCurr,bitIndex1)<->$F)& (v304(VarCurr,bitIndex0)<->$T)).
% 299.29/297.47  all VarCurr (v530(VarCurr)<-> (v304(VarCurr,bitIndex2)<->$T)& (v304(VarCurr,bitIndex1)<->$F)& (v304(VarCurr,bitIndex0)<->$F)).
% 299.29/297.47  all VarCurr (v529(VarCurr)<-> (v304(VarCurr,bitIndex2)<->$F)& (v304(VarCurr,bitIndex1)<->$T)& (v304(VarCurr,bitIndex0)<->$T)).
% 299.29/297.47  all VarCurr (v528(VarCurr)<-> (v304(VarCurr,bitIndex2)<->$F)& (v304(VarCurr,bitIndex1)<->$T)& (v304(VarCurr,bitIndex0)<->$F)).
% 299.29/297.47  all VarCurr (v527(VarCurr)<-> (v304(VarCurr,bitIndex2)<->$F)& (v304(VarCurr,bitIndex1)<->$F)& (v304(VarCurr,bitIndex0)<->$T)).
% 299.29/297.47  v302(constB0,bitIndex383)<->$F.
% 299.29/297.47  v302(constB0,bitIndex382)<->$F.
% 299.29/297.47  v302(constB0,bitIndex381)<->$F.
% 299.29/297.47  v302(constB0,bitIndex380)<->$F.
% 299.29/297.47  v302(constB0,bitIndex379)<->$F.
% 299.29/297.47  v302(constB0,bitIndex378)<->$F.
% 299.29/297.47  v302(constB0,bitIndex377)<->$F.
% 299.29/297.47  v302(constB0,bitIndex376)<->$F.
% 299.29/297.47  v302(constB0,bitIndex375)<->$F.
% 299.29/297.47  v302(constB0,bitIndex374)<->$F.
% 299.29/297.47  v302(constB0,bitIndex373)<->$F.
% 299.29/297.47  v302(constB0,bitIndex372)<->$F.
% 299.29/297.47  v302(constB0,bitIndex371)<->$F.
% 299.29/297.47  v302(constB0,bitIndex370)<->$F.
% 299.29/297.47  v302(constB0,bitIndex369)<->$F.
% 299.29/297.47  v302(constB0,bitIndex368)<->$F.
% 299.29/297.47  v302(constB0,bitIndex367)<->$F.
% 299.29/297.47  v302(constB0,bitIndex366)<->$F.
% 299.29/297.47  v302(constB0,bitIndex365)<->$F.
% 299.29/297.47  v302(constB0,bitIndex364)<->$F.
% 299.29/297.47  v302(constB0,bitIndex363)<->$F.
% 299.29/297.47  v302(constB0,bitIndex362)<->$F.
% 299.29/297.47  v302(constB0,bitIndex361)<->$F.
% 299.29/297.47  v302(constB0,bitIndex360)<->$F.
% 299.29/297.47  v302(constB0,bitIndex359)<->$F.
% 299.29/297.47  v302(constB0,bitIndex358)<->$F.
% 299.29/297.47  v302(constB0,bitIndex357)<->$F.
% 299.29/297.47  v302(constB0,bitIndex356)<->$F.
% 299.29/297.47  v302(constB0,bitIndex355)<->$F.
% 299.29/297.47  v302(constB0,bitIndex354)<->$F.
% 299.29/297.47  v302(constB0,bitIndex353)<->$F.
% 299.29/297.47  v302(constB0,bitIndex352)<->$F.
% 299.29/297.47  v302(constB0,bitIndex351)<->$F.
% 299.29/297.47  v302(constB0,bitIndex350)<->$F.
% 299.29/297.47  v302(constB0,bitIndex349)<->$F.
% 299.29/297.47  v302(constB0,bitIndex348)<->$F.
% 299.29/297.47  v302(constB0,bitIndex347)<->$F.
% 299.29/297.47  v302(constB0,bitIndex346)<->$F.
% 299.29/297.47  v302(constB0,bitIndex345)<->$F.
% 299.29/297.47  v302(constB0,bitIndex344)<->$F.
% 299.29/297.47  v302(constB0,bitIndex343)<->$F.
% 299.29/297.47  v302(constB0,bitIndex342)<->$F.
% 299.29/297.47  v302(constB0,bitIndex341)<->$F.
% 299.29/297.47  v302(constB0,bitIndex340)<->$F.
% 299.29/297.47  v302(constB0,bitIndex339)<->$F.
% 299.29/297.47  v302(constB0,bitIndex338)<->$F.
% 299.29/297.47  v302(constB0,bitIndex337)<->$F.
% 299.29/297.47  v302(constB0,bitIndex336)<->$F.
% 299.29/297.47  v302(constB0,bitIndex335)<->$F.
% 299.29/297.47  v302(constB0,bitIndex334)<->$F.
% 299.29/297.47  v302(constB0,bitIndex333)<->$F.
% 299.29/297.47  v302(constB0,bitIndex332)<->$F.
% 299.29/297.47  v302(constB0,bitIndex331)<->$F.
% 299.29/297.47  v302(constB0,bitIndex330)<->$F.
% 299.29/297.47  v302(constB0,bitIndex329)<->$F.
% 299.29/297.47  v302(constB0,bitIndex328)<->$F.
% 299.29/297.47  v302(constB0,bitIndex327)<->$F.
% 299.29/297.47  v302(constB0,bitIndex326)<->$F.
% 299.29/297.47  v302(constB0,bitIndex325)<->$F.
% 299.29/297.47  v302(constB0,bitIndex324)<->$F.
% 299.29/297.47  v302(constB0,bitIndex323)<->$F.
% 299.29/297.47  v302(constB0,bitIndex322)<->$F.
% 299.29/297.47  v302(constB0,bitIndex321)<->$F.
% 299.29/297.47  v302(constB0,bitIndex320)<->$F.
% 299.29/297.47  v302(constB0,bitIndex319)<->$F.
% 299.29/297.47  v302(constB0,bitIndex318)<->$F.
% 299.29/297.47  v302(constB0,bitIndex317)<->$F.
% 299.29/297.47  v302(constB0,bitIndex316)<->$F.
% 299.29/297.47  v302(constB0,bitIndex315)<->$F.
% 299.29/297.47  v302(constB0,bitIndex314)<->$F.
% 299.29/297.47  v302(constB0,bitIndex313)<->$F.
% 299.29/297.47  v302(constB0,bitIndex312)<->$F.
% 299.29/297.47  v302(constB0,bitIndex311)<->$F.
% 299.29/297.47  v302(constB0,bitIndex310)<->$F.
% 299.29/297.47  v302(constB0,bitIndex309)<->$F.
% 299.29/297.47  v302(constB0,bitIndex308)<->$F.
% 299.29/297.47  v302(constB0,bitIndex307)<->$F.
% 299.29/297.47  v302(constB0,bitIndex306)<->$F.
% 299.29/297.47  v302(constB0,bitIndex305)<->$F.
% 299.29/297.47  v302(constB0,bitIndex304)<->$F.
% 299.29/297.47  v302(constB0,bitIndex303)<->$F.
% 299.29/297.47  v302(constB0,bitIndex302)<->$F.
% 299.29/297.47  v302(constB0,bitIndex301)<->$F.
% 299.29/297.47  v302(constB0,bitIndex300)<->$F.
% 299.29/297.47  v302(constB0,bitIndex299)<->$F.
% 299.29/297.47  v302(constB0,bitIndex298)<->$F.
% 299.29/297.47  v302(constB0,bitIndex297)<->$F.
% 299.29/297.47  v302(constB0,bitIndex296)<->$F.
% 299.29/297.47  v302(constB0,bitIndex295)<->$F.
% 299.29/297.47  v302(constB0,bitIndex294)<->$F.
% 299.29/297.47  v302(constB0,bitIndex293)<->$F.
% 299.29/297.47  v302(constB0,bitIndex292)<->$F.
% 299.29/297.47  v302(constB0,bitIndex291)<->$F.
% 299.29/297.47  v302(constB0,bitIndex290)<->$F.
% 299.29/297.47  v302(constB0,bitIndex289)<->$F.
% 299.29/297.47  v302(constB0,bitIndex288)<->$F.
% 299.29/297.47  v302(constB0,bitIndex287)<->$F.
% 299.29/297.47  v302(constB0,bitIndex286)<->$F.
% 299.29/297.47  v302(constB0,bitIndex285)<->$F.
% 299.29/297.47  v302(constB0,bitIndex284)<->$F.
% 299.29/297.47  v302(constB0,bitIndex283)<->$F.
% 299.29/297.47  v302(constB0,bitIndex282)<->$F.
% 299.29/297.47  v302(constB0,bitIndex281)<->$F.
% 299.29/297.47  v302(constB0,bitIndex280)<->$F.
% 299.29/297.47  v302(constB0,bitIndex279)<->$F.
% 299.29/297.47  v302(constB0,bitIndex278)<->$F.
% 299.29/297.47  v302(constB0,bitIndex277)<->$F.
% 299.29/297.47  v302(constB0,bitIndex276)<->$F.
% 299.29/297.47  v302(constB0,bitIndex275)<->$F.
% 299.29/297.47  v302(constB0,bitIndex274)<->$F.
% 299.29/297.47  v302(constB0,bitIndex273)<->$F.
% 299.31/297.48  v302(constB0,bitIndex272)<->$F.
% 299.31/297.48  v302(constB0,bitIndex271)<->$F.
% 299.31/297.48  v302(constB0,bitIndex270)<->$F.
% 299.31/297.48  v302(constB0,bitIndex269)<->$F.
% 299.31/297.48  v302(constB0,bitIndex268)<->$F.
% 299.31/297.48  v302(constB0,bitIndex267)<->$F.
% 299.31/297.48  v302(constB0,bitIndex266)<->$F.
% 299.31/297.48  v302(constB0,bitIndex265)<->$F.
% 299.31/297.48  v302(constB0,bitIndex264)<->$F.
% 299.31/297.48  v302(constB0,bitIndex263)<->$F.
% 299.31/297.48  v302(constB0,bitIndex262)<->$F.
% 299.31/297.48  v302(constB0,bitIndex261)<->$F.
% 299.31/297.48  v302(constB0,bitIndex260)<->$F.
% 299.31/297.48  v302(constB0,bitIndex259)<->$F.
% 299.31/297.48  v302(constB0,bitIndex258)<->$F.
% 299.31/297.48  v302(constB0,bitIndex257)<->$F.
% 299.31/297.48  v302(constB0,bitIndex256)<->$F.
% 299.31/297.48  v302(constB0,bitIndex255)<->$F.
% 299.31/297.48  v302(constB0,bitIndex254)<->$F.
% 299.31/297.48  v302(constB0,bitIndex253)<->$F.
% 299.31/297.48  v302(constB0,bitIndex252)<->$F.
% 299.31/297.48  v302(constB0,bitIndex251)<->$F.
% 299.31/297.48  v302(constB0,bitIndex250)<->$F.
% 299.31/297.48  v302(constB0,bitIndex249)<->$F.
% 299.31/297.48  v302(constB0,bitIndex248)<->$F.
% 299.31/297.48  v302(constB0,bitIndex247)<->$F.
% 299.31/297.48  v302(constB0,bitIndex246)<->$F.
% 299.31/297.48  v302(constB0,bitIndex245)<->$F.
% 299.31/297.48  v302(constB0,bitIndex244)<->$F.
% 299.31/297.48  v302(constB0,bitIndex243)<->$F.
% 299.31/297.48  v302(constB0,bitIndex242)<->$F.
% 299.31/297.48  v302(constB0,bitIndex241)<->$F.
% 299.31/297.48  v302(constB0,bitIndex240)<->$F.
% 299.31/297.48  v302(constB0,bitIndex239)<->$F.
% 299.31/297.48  v302(constB0,bitIndex238)<->$F.
% 299.31/297.48  v302(constB0,bitIndex237)<->$F.
% 299.31/297.48  v302(constB0,bitIndex236)<->$F.
% 299.31/297.48  v302(constB0,bitIndex235)<->$F.
% 299.31/297.48  v302(constB0,bitIndex234)<->$F.
% 299.31/297.48  v302(constB0,bitIndex233)<->$F.
% 299.31/297.48  v302(constB0,bitIndex232)<->$F.
% 299.31/297.48  v302(constB0,bitIndex231)<->$F.
% 299.31/297.48  v302(constB0,bitIndex230)<->$F.
% 299.31/297.48  v302(constB0,bitIndex229)<->$F.
% 299.31/297.48  v302(constB0,bitIndex228)<->$F.
% 299.31/297.48  v302(constB0,bitIndex227)<->$F.
% 299.31/297.48  v302(constB0,bitIndex226)<->$F.
% 299.31/297.48  v302(constB0,bitIndex225)<->$F.
% 299.31/297.48  v302(constB0,bitIndex224)<->$F.
% 299.31/297.48  v302(constB0,bitIndex223)<->$F.
% 299.31/297.48  v302(constB0,bitIndex222)<->$F.
% 299.31/297.48  v302(constB0,bitIndex221)<->$F.
% 299.31/297.48  v302(constB0,bitIndex220)<->$F.
% 299.31/297.48  v302(constB0,bitIndex219)<->$F.
% 299.31/297.48  v302(constB0,bitIndex218)<->$F.
% 299.31/297.48  v302(constB0,bitIndex217)<->$F.
% 299.31/297.48  v302(constB0,bitIndex216)<->$F.
% 299.31/297.48  v302(constB0,bitIndex215)<->$F.
% 299.31/297.48  v302(constB0,bitIndex214)<->$F.
% 299.31/297.48  v302(constB0,bitIndex213)<->$F.
% 299.31/297.48  v302(constB0,bitIndex212)<->$F.
% 299.31/297.48  v302(constB0,bitIndex211)<->$F.
% 299.31/297.48  v302(constB0,bitIndex210)<->$F.
% 299.31/297.48  v302(constB0,bitIndex209)<->$F.
% 299.31/297.48  v302(constB0,bitIndex208)<->$F.
% 299.31/297.48  v302(constB0,bitIndex207)<->$F.
% 299.31/297.48  v302(constB0,bitIndex206)<->$F.
% 299.31/297.48  v302(constB0,bitIndex205)<->$F.
% 299.31/297.48  v302(constB0,bitIndex204)<->$F.
% 299.31/297.48  v302(constB0,bitIndex203)<->$F.
% 299.31/297.48  v302(constB0,bitIndex202)<->$F.
% 299.31/297.48  v302(constB0,bitIndex201)<->$F.
% 299.31/297.48  v302(constB0,bitIndex200)<->$F.
% 299.31/297.48  v302(constB0,bitIndex199)<->$F.
% 299.31/297.48  v302(constB0,bitIndex198)<->$F.
% 299.31/297.48  v302(constB0,bitIndex197)<->$F.
% 299.31/297.48  v302(constB0,bitIndex196)<->$F.
% 299.31/297.48  v302(constB0,bitIndex195)<->$F.
% 299.31/297.48  v302(constB0,bitIndex194)<->$F.
% 299.31/297.48  v302(constB0,bitIndex193)<->$F.
% 299.31/297.48  v302(constB0,bitIndex192)<->$F.
% 299.31/297.48  v302(constB0,bitIndex191)<->$F.
% 299.31/297.48  v302(constB0,bitIndex190)<->$F.
% 299.31/297.48  v302(constB0,bitIndex189)<->$F.
% 299.31/297.48  v302(constB0,bitIndex188)<->$F.
% 299.31/297.48  v302(constB0,bitIndex187)<->$F.
% 299.31/297.48  v302(constB0,bitIndex186)<->$F.
% 299.31/297.48  v302(constB0,bitIndex185)<->$F.
% 299.31/297.48  v302(constB0,bitIndex184)<->$F.
% 299.31/297.48  v302(constB0,bitIndex183)<->$F.
% 299.31/297.48  v302(constB0,bitIndex182)<->$F.
% 299.31/297.48  v302(constB0,bitIndex181)<->$F.
% 299.31/297.48  v302(constB0,bitIndex180)<->$F.
% 299.31/297.48  v302(constB0,bitIndex179)<->$F.
% 299.31/297.48  v302(constB0,bitIndex178)<->$F.
% 299.31/297.48  v302(constB0,bitIndex177)<->$F.
% 299.31/297.48  v302(constB0,bitIndex176)<->$F.
% 299.31/297.48  v302(constB0,bitIndex175)<->$F.
% 299.31/297.48  v302(constB0,bitIndex174)<->$F.
% 299.31/297.48  v302(constB0,bitIndex173)<->$F.
% 299.31/297.48  v302(constB0,bitIndex172)<->$F.
% 299.31/297.48  v302(constB0,bitIndex171)<->$F.
% 299.31/297.48  v302(constB0,bitIndex170)<->$F.
% 299.31/297.48  v302(constB0,bitIndex169)<->$F.
% 299.31/297.48  v302(constB0,bitIndex168)<->$F.
% 299.31/297.48  v302(constB0,bitIndex167)<->$F.
% 299.31/297.48  v302(constB0,bitIndex166)<->$F.
% 299.31/297.48  v302(constB0,bitIndex165)<->$F.
% 299.31/297.48  v302(constB0,bitIndex164)<->$F.
% 299.31/297.48  v302(constB0,bitIndex163)<->$F.
% 299.31/297.48  v302(constB0,bitIndex162)<->$F.
% 299.31/297.48  v302(constB0,bitIndex161)<->$F.
% 299.31/297.48  v302(constB0,bitIndex160)<->$F.
% 299.31/297.48  v302(constB0,bitIndex159)<->$F.
% 299.31/297.48  v302(constB0,bitIndex158)<->$F.
% 299.31/297.48  v302(constB0,bitIndex157)<->$F.
% 299.31/297.48  v302(constB0,bitIndex156)<->$F.
% 299.31/297.48  v302(constB0,bitIndex155)<->$F.
% 299.31/297.48  v302(constB0,bitIndex154)<->$F.
% 299.31/297.48  v302(constB0,bitIndex153)<->$F.
% 299.31/297.48  v302(constB0,bitIndex152)<->$F.
% 299.31/297.48  v302(constB0,bitIndex151)<->$F.
% 299.31/297.48  v302(constB0,bitIndex150)<->$F.
% 299.31/297.48  v302(constB0,bitIndex149)<->$F.
% 299.31/297.48  v302(constB0,bitIndex148)<->$F.
% 299.31/297.48  v302(constB0,bitIndex147)<->$F.
% 299.31/297.48  v302(constB0,bitIndex146)<->$F.
% 299.31/297.48  v302(constB0,bitIndex145)<->$F.
% 299.31/297.49  v302(constB0,bitIndex144)<->$F.
% 299.31/297.49  v302(constB0,bitIndex143)<->$F.
% 299.31/297.49  v302(constB0,bitIndex142)<->$F.
% 299.31/297.49  v302(constB0,bitIndex141)<->$F.
% 299.31/297.49  v302(constB0,bitIndex140)<->$F.
% 299.31/297.49  v302(constB0,bitIndex139)<->$F.
% 299.31/297.49  v302(constB0,bitIndex138)<->$F.
% 299.31/297.49  v302(constB0,bitIndex137)<->$F.
% 299.31/297.49  v302(constB0,bitIndex136)<->$F.
% 299.31/297.49  v302(constB0,bitIndex135)<->$F.
% 299.31/297.49  v302(constB0,bitIndex134)<->$F.
% 299.31/297.49  v302(constB0,bitIndex133)<->$F.
% 299.31/297.49  v302(constB0,bitIndex132)<->$F.
% 299.31/297.49  v302(constB0,bitIndex131)<->$F.
% 299.31/297.49  v302(constB0,bitIndex130)<->$F.
% 299.31/297.49  v302(constB0,bitIndex129)<->$F.
% 299.31/297.49  v302(constB0,bitIndex128)<->$F.
% 299.31/297.49  v302(constB0,bitIndex127)<->$F.
% 299.31/297.49  v302(constB0,bitIndex126)<->$F.
% 299.31/297.49  v302(constB0,bitIndex125)<->$F.
% 299.31/297.49  v302(constB0,bitIndex124)<->$F.
% 299.31/297.49  v302(constB0,bitIndex123)<->$F.
% 299.31/297.49  v302(constB0,bitIndex122)<->$F.
% 299.31/297.49  v302(constB0,bitIndex121)<->$F.
% 299.31/297.49  v302(constB0,bitIndex120)<->$F.
% 299.31/297.49  v302(constB0,bitIndex119)<->$F.
% 299.31/297.49  v302(constB0,bitIndex118)<->$F.
% 299.31/297.49  v302(constB0,bitIndex117)<->$F.
% 299.31/297.49  v302(constB0,bitIndex116)<->$F.
% 299.31/297.49  v302(constB0,bitIndex115)<->$F.
% 299.31/297.49  v302(constB0,bitIndex114)<->$F.
% 299.31/297.49  v302(constB0,bitIndex113)<->$F.
% 299.31/297.49  v302(constB0,bitIndex112)<->$F.
% 299.31/297.49  v302(constB0,bitIndex111)<->$F.
% 299.31/297.49  v302(constB0,bitIndex110)<->$F.
% 299.31/297.49  v302(constB0,bitIndex109)<->$F.
% 299.31/297.49  v302(constB0,bitIndex108)<->$F.
% 299.31/297.49  v302(constB0,bitIndex107)<->$F.
% 299.31/297.49  v302(constB0,bitIndex106)<->$F.
% 299.31/297.49  v302(constB0,bitIndex105)<->$F.
% 299.31/297.49  v302(constB0,bitIndex104)<->$F.
% 299.31/297.49  v302(constB0,bitIndex103)<->$F.
% 299.31/297.49  v302(constB0,bitIndex102)<->$F.
% 299.31/297.49  v302(constB0,bitIndex101)<->$F.
% 299.31/297.49  v302(constB0,bitIndex100)<->$F.
% 299.31/297.49  v302(constB0,bitIndex99)<->$F.
% 299.31/297.49  v302(constB0,bitIndex98)<->$F.
% 299.31/297.49  v302(constB0,bitIndex97)<->$F.
% 299.31/297.49  v302(constB0,bitIndex96)<->$F.
% 299.31/297.49  v302(constB0,bitIndex95)<->$F.
% 299.31/297.49  v302(constB0,bitIndex94)<->$F.
% 299.31/297.49  v302(constB0,bitIndex93)<->$F.
% 299.31/297.49  v302(constB0,bitIndex92)<->$F.
% 299.31/297.49  v302(constB0,bitIndex91)<->$F.
% 299.31/297.49  v302(constB0,bitIndex90)<->$F.
% 299.31/297.49  v302(constB0,bitIndex89)<->$F.
% 299.31/297.49  v302(constB0,bitIndex88)<->$F.
% 299.31/297.49  v302(constB0,bitIndex87)<->$F.
% 299.31/297.49  v302(constB0,bitIndex86)<->$F.
% 299.31/297.49  v302(constB0,bitIndex85)<->$F.
% 299.31/297.49  v302(constB0,bitIndex84)<->$F.
% 299.31/297.49  v302(constB0,bitIndex83)<->$F.
% 299.31/297.49  v302(constB0,bitIndex82)<->$F.
% 299.31/297.49  v302(constB0,bitIndex81)<->$F.
% 299.31/297.49  v302(constB0,bitIndex80)<->$F.
% 299.31/297.49  v302(constB0,bitIndex79)<->$F.
% 299.31/297.49  v302(constB0,bitIndex78)<->$F.
% 299.31/297.49  v302(constB0,bitIndex77)<->$F.
% 299.31/297.49  v302(constB0,bitIndex76)<->$F.
% 299.31/297.49  v302(constB0,bitIndex75)<->$F.
% 299.31/297.49  v302(constB0,bitIndex74)<->$F.
% 299.31/297.49  v302(constB0,bitIndex73)<->$F.
% 299.31/297.49  v302(constB0,bitIndex72)<->$F.
% 299.31/297.49  v302(constB0,bitIndex71)<->$F.
% 299.31/297.49  v302(constB0,bitIndex70)<->$F.
% 299.31/297.49  v302(constB0,bitIndex69)<->$F.
% 299.31/297.49  v302(constB0,bitIndex68)<->$F.
% 299.31/297.49  v302(constB0,bitIndex67)<->$F.
% 299.31/297.49  v302(constB0,bitIndex66)<->$F.
% 299.31/297.49  v302(constB0,bitIndex65)<->$F.
% 299.31/297.49  v302(constB0,bitIndex64)<->$F.
% 299.31/297.49  v302(constB0,bitIndex63)<->$F.
% 299.31/297.49  v302(constB0,bitIndex62)<->$F.
% 299.31/297.49  v302(constB0,bitIndex61)<->$F.
% 299.31/297.49  v302(constB0,bitIndex60)<->$F.
% 299.31/297.49  v302(constB0,bitIndex59)<->$F.
% 299.31/297.49  v302(constB0,bitIndex58)<->$F.
% 299.31/297.49  v302(constB0,bitIndex57)<->$F.
% 299.31/297.49  v302(constB0,bitIndex56)<->$F.
% 299.31/297.49  v302(constB0,bitIndex55)<->$F.
% 299.31/297.49  v302(constB0,bitIndex54)<->$F.
% 299.31/297.49  v302(constB0,bitIndex53)<->$F.
% 299.31/297.49  v302(constB0,bitIndex52)<->$F.
% 299.31/297.49  v302(constB0,bitIndex51)<->$F.
% 299.31/297.49  v302(constB0,bitIndex50)<->$F.
% 299.31/297.49  v302(constB0,bitIndex49)<->$F.
% 299.31/297.49  v302(constB0,bitIndex48)<->$F.
% 299.31/297.49  all B (range_47_0(B)-> (v302(constB0,B)<->$F)).
% 299.31/297.49  all B (range_47_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex47).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex46).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex45).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex44).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex43).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex42).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex41).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex40).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex39).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex38).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex37).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex36).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex35).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex34).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex33).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex32).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex31).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex30).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex29).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex28).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex27).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex26).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex25).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex24).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex23).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex22).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex21).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex20).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex19).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex18).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex17).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex16).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex15).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex14).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex13).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex12).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex11).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex10).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex9).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex8).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex7).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex6).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex5).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex4).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex3).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex2).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex1).
% 299.31/297.49  -b000000000000000000000000000000000000000000000000(bitIndex0).
% 299.31/297.49  all VarCurr (v525(VarCurr)<-> (v304(VarCurr,bitIndex2)<->$F)& (v304(VarCurr,bitIndex1)<->$F)& (v304(VarCurr,bitIndex0)<->$F)).
% 299.31/297.49  all VarCurr (-v306(VarCurr)-> (all B (range_2_0(B)-> (v304(VarCurr,B)<->v520(VarCurr,B))))).
% 299.31/297.49  all VarCurr (v306(VarCurr)-> (all B (range_2_0(B)-> (v304(VarCurr,B)<->v503(VarCurr,B))))).
% 299.31/297.49  all B (range_8_0(B)-> (v520(constB0,B)<->$F)).
% 299.31/297.49  all B (range_8_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B).
% 299.31/297.49  -b000000000(bitIndex8).
% 299.31/297.49  -b000000000(bitIndex7).
% 299.31/297.49  -b000000000(bitIndex6).
% 299.31/297.49  -b000000000(bitIndex5).
% 299.31/297.49  -b000000000(bitIndex4).
% 299.31/297.49  -b000000000(bitIndex3).
% 299.31/297.49  -b000000000(bitIndex2).
% 299.31/297.49  -b000000000(bitIndex1).
% 299.31/297.49  -b000000000(bitIndex0).
% 299.31/297.49  all VarCurr B (range_2_0(B)-> (v503(VarCurr,B)<->v505(VarCurr,B))).
% 299.31/297.49  all VarCurr B (range_2_0(B)-> (v505(VarCurr,B)<->v507(VarCurr,B))).
% 299.31/297.49  all VarCurr B (range_2_0(B)-> (v507(VarCurr,B)<->v509(VarCurr,B))).
% 299.31/297.49  all VarCurr (-v511(VarCurr)& -v512(VarCurr)& -v513(VarCurr)& -v514(VarCurr)& -v515(VarCurr)& -v516(VarCurr)& -v517(VarCurr)-> (all B (range_2_0(B)-> (v509(VarCurr,B)<->$T)))).
% 299.31/297.49  all VarCurr (v517(VarCurr)-> (all B (range_2_0(B)-> (v509(VarCurr,B)<->b110(B))))).
% 299.31/297.49  all VarCurr (v516(VarCurr)-> (all B (range_2_0(B)-> (v509(VarCurr,B)<->b101(B))))).
% 299.31/297.49  all VarCurr (v515(VarCurr)-> (all B (range_2_0(B)-> (v509(VarCurr,B)<->b100(B))))).
% 299.31/297.49  all VarCurr (v514(VarCurr)-> (all B (range_2_0(B)-> (v509(VarCurr,B)<->b011(B))))).
% 299.31/297.50  all VarCurr (v513(VarCurr)-> (all B (range_2_0(B)-> (v509(VarCurr,B)<->b100(B))))).
% 299.31/297.50  all VarCurr (v512(VarCurr)-> (all B (range_2_0(B)-> (v509(VarCurr,B)<->b101(B))))).
% 299.31/297.50  all VarCurr (v511(VarCurr)-> (all B (range_2_0(B)-> (v509(VarCurr,B)<->b110(B))))).
% 299.31/297.50  all VarCurr (v518(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$T)& (v399(VarCurr,bitIndex1)<->$T)& (v399(VarCurr,bitIndex0)<->$T)).
% 299.31/297.50  all VarCurr (v517(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$T)& (v399(VarCurr,bitIndex1)<->$T)& (v399(VarCurr,bitIndex0)<->$F)).
% 299.31/297.50  all VarCurr (v516(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$T)& (v399(VarCurr,bitIndex1)<->$F)& (v399(VarCurr,bitIndex0)<->$T)).
% 299.31/297.50  all VarCurr (v515(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$T)& (v399(VarCurr,bitIndex1)<->$F)& (v399(VarCurr,bitIndex0)<->$F)).
% 299.31/297.50  all VarCurr (v514(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$F)& (v399(VarCurr,bitIndex1)<->$T)& (v399(VarCurr,bitIndex0)<->$T)).
% 299.31/297.50  all VarCurr (v513(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$F)& (v399(VarCurr,bitIndex1)<->$T)& (v399(VarCurr,bitIndex0)<->$F)).
% 299.31/297.50  all VarCurr (v512(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$F)& (v399(VarCurr,bitIndex1)<->$F)& (v399(VarCurr,bitIndex0)<->$T)).
% 299.31/297.50  all VarCurr (v511(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$F)& (v399(VarCurr,bitIndex1)<->$F)& (v399(VarCurr,bitIndex0)<->$F)).
% 299.31/297.50  all VarCurr (v306(VarCurr)<->v308(VarCurr)).
% 299.31/297.50  all VarCurr (v308(VarCurr)<->v310(VarCurr)).
% 299.31/297.50  all VarCurr (v310(VarCurr)<->v312(VarCurr)).
% 299.31/297.50  all VarCurr (-v501(VarCurr)& -v407(VarCurr)-> (v312(VarCurr)<->$F)).
% 299.31/297.50  all VarCurr (v407(VarCurr)-> (v312(VarCurr)<->v442(VarCurr))).
% 299.31/297.50  all VarCurr (v501(VarCurr)-> (v312(VarCurr)<->$F)).
% 299.31/297.50  all VarCurr (v501(VarCurr)<->v405(VarCurr)|v406(VarCurr)).
% 299.31/297.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v489(VarNext)-> (all B (range_1_0(B)-> (v314(VarNext,B)<->v314(VarCurr,B)))))).
% 299.31/297.50  all VarNext (v489(VarNext)-> (all B (range_1_0(B)-> (v314(VarNext,B)<->v497(VarNext,B))))).
% 299.31/297.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_1_0(B)-> (v497(VarNext,B)<->v495(VarCurr,B))))).
% 299.31/297.50  all VarCurr (-v437(VarCurr)-> (all B (range_1_0(B)-> (v495(VarCurr,B)<->v318(VarCurr,B))))).
% 299.31/297.50  all VarCurr (v437(VarCurr)-> (all B (range_1_0(B)-> (v495(VarCurr,B)<->$F)))).
% 299.31/297.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v489(VarNext)<->v490(VarNext))).
% 299.31/297.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v490(VarNext)<->v492(VarNext)&v423(VarNext))).
% 299.31/297.50  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v492(VarNext)<->v430(VarNext))).
% 299.31/297.50  all VarCurr (-v454(VarCurr)& -v458(VarCurr)& -v468(VarCurr)-> (all B (range_1_0(B)-> (v318(VarCurr,B)<->$F)))).
% 299.31/297.50  all VarCurr (v468(VarCurr)-> (all B (range_1_0(B)-> (v318(VarCurr,B)<->v469(VarCurr,B))))).
% 299.31/297.50  all VarCurr (v458(VarCurr)-> (all B (range_1_0(B)-> (v318(VarCurr,B)<->v459(VarCurr,B))))).
% 299.31/297.50  all VarCurr (v454(VarCurr)-> (all B (range_1_0(B)-> (v318(VarCurr,B)<->v455(VarCurr,B))))).
% 299.31/297.50  all VarCurr (v486(VarCurr)<-> (v314(VarCurr,bitIndex1)<->$T)& (v314(VarCurr,bitIndex0)<->$T)).
% 299.31/297.50  all VarCurr (-v470(VarCurr)& -v477(VarCurr)& -v479(VarCurr)-> (all B (range_1_0(B)-> (v469(VarCurr,B)<->$F)))).
% 299.31/297.50  all VarCurr (v479(VarCurr)-> (all B (range_1_0(B)-> (v469(VarCurr,B)<->b10(B))))).
% 299.31/297.50  all VarCurr (v477(VarCurr)-> (all B (range_1_0(B)-> (v469(VarCurr,B)<->v478(VarCurr,B))))).
% 299.31/297.50  all VarCurr (v470(VarCurr)-> (all B (range_1_0(B)-> (v469(VarCurr,B)<->v476(VarCurr,B))))).
% 299.31/297.50  all VarCurr (v485(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$T)& (v399(VarCurr,bitIndex1)<->$T)& (v399(VarCurr,bitIndex0)<->$T)).
% 299.31/297.50  b111(bitIndex2).
% 299.31/297.50  b111(bitIndex1).
% 299.31/297.50  b111(bitIndex0).
% 299.31/297.50  all VarCurr (v479(VarCurr)<->v481(VarCurr)|v484(VarCurr)).
% 299.31/297.50  all VarCurr (v484(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$T)& (v399(VarCurr,bitIndex1)<->$T)& (v399(VarCurr,bitIndex0)<->$F)).
% 299.31/297.50  b110(bitIndex2).
% 299.31/297.50  b110(bitIndex1).
% 299.31/297.50  -b110(bitIndex0).
% 299.31/297.50  all VarCurr (v481(VarCurr)<->v482(VarCurr)|v483(VarCurr)).
% 299.31/297.50  all VarCurr (v483(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$T)& (v399(VarCurr,bitIndex1)<->$F)& (v399(VarCurr,bitIndex0)<->$T)).
% 299.31/297.50  b101(bitIndex2).
% 299.31/297.50  -b101(bitIndex1).
% 299.31/297.50  b101(bitIndex0).
% 299.31/297.50  all VarCurr (v482(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$T)& (v399(VarCurr,bitIndex1)<->$F)& (v399(VarCurr,bitIndex0)<->$F)).
% 299.31/297.51  b100(bitIndex2).
% 299.31/297.51  -b100(bitIndex1).
% 299.31/297.51  -b100(bitIndex0).
% 299.31/297.51  all VarCurr (-v442(VarCurr)-> (all B (range_1_0(B)-> (v478(VarCurr,B)<->$T)))).
% 299.31/297.51  all VarCurr (v442(VarCurr)-> (all B (range_1_0(B)-> (v478(VarCurr,B)<->b10(B))))).
% 299.31/297.51  all VarCurr (v477(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$F)& (v399(VarCurr,bitIndex1)<->$T)& (v399(VarCurr,bitIndex0)<->$T)).
% 299.31/297.51  -b011(bitIndex2).
% 299.31/297.51  b011(bitIndex1).
% 299.31/297.51  b011(bitIndex0).
% 299.31/297.51  all VarCurr (-v350(VarCurr)-> (all B (range_1_0(B)-> (v476(VarCurr,B)<->$T)))).
% 299.31/297.51  all VarCurr (v350(VarCurr)-> (all B (range_1_0(B)-> (v476(VarCurr,B)<->b10(B))))).
% 299.31/297.51  all VarCurr (v470(VarCurr)<->v472(VarCurr)|v475(VarCurr)).
% 299.31/297.51  all VarCurr (v475(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$F)& (v399(VarCurr,bitIndex1)<->$T)& (v399(VarCurr,bitIndex0)<->$F)).
% 299.31/297.51  all VarCurr (v472(VarCurr)<->v473(VarCurr)|v474(VarCurr)).
% 299.31/297.51  all VarCurr (v474(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$F)& (v399(VarCurr,bitIndex1)<->$F)& (v399(VarCurr,bitIndex0)<->$T)).
% 299.31/297.51  -b001(bitIndex2).
% 299.31/297.51  -b001(bitIndex1).
% 299.31/297.51  b001(bitIndex0).
% 299.31/297.51  all VarCurr (v473(VarCurr)<-> (v399(VarCurr,bitIndex2)<->$F)& (v399(VarCurr,bitIndex1)<->$F)& (v399(VarCurr,bitIndex0)<->$F)).
% 299.31/297.51  all VarCurr (v468(VarCurr)<-> (v314(VarCurr,bitIndex1)<->$T)& (v314(VarCurr,bitIndex0)<->$F)).
% 299.31/297.51  all VarCurr (-v460(VarCurr)& -v462(VarCurr)& -v464(VarCurr)-> (all B (range_1_0(B)-> (v459(VarCurr,B)<->$T)))).
% 299.31/297.51  all VarCurr (v464(VarCurr)-> (all B (range_1_0(B)-> (v459(VarCurr,B)<->b10(B))))).
% 299.31/297.51  all VarCurr (v462(VarCurr)-> (all B (range_1_0(B)-> (v459(VarCurr,B)<->$F)))).
% 299.31/297.51  all VarCurr (v460(VarCurr)-> (all B (range_1_0(B)-> (v459(VarCurr,B)<->b01(B))))).
% 299.31/297.51  all VarCurr (v466(VarCurr)<-> (v467(VarCurr,bitIndex1)<->$T)& (v467(VarCurr,bitIndex0)<->$T)).
% 299.31/297.51  all VarCurr (v467(VarCurr,bitIndex0)<->v372(VarCurr,bitIndex0)).
% 299.31/297.51  all VarCurr (v467(VarCurr,bitIndex1)<->v350(VarCurr)).
% 299.31/297.51  all VarCurr (v464(VarCurr)<-> (v465(VarCurr,bitIndex1)<->$T)& (v465(VarCurr,bitIndex0)<->$F)).
% 299.31/297.51  all VarCurr (v465(VarCurr,bitIndex0)<->v372(VarCurr,bitIndex0)).
% 299.31/297.51  all VarCurr (v465(VarCurr,bitIndex1)<->v350(VarCurr)).
% 299.31/297.51  all VarCurr (v462(VarCurr)<-> (v463(VarCurr,bitIndex1)<->$F)& (v463(VarCurr,bitIndex0)<->$T)).
% 299.31/297.51  all VarCurr (v463(VarCurr,bitIndex0)<->v372(VarCurr,bitIndex0)).
% 299.31/297.51  all VarCurr (v463(VarCurr,bitIndex1)<->v350(VarCurr)).
% 299.31/297.51  all VarCurr (v460(VarCurr)<-> (v461(VarCurr,bitIndex1)<->$F)& (v461(VarCurr,bitIndex0)<->$F)).
% 299.31/297.51  all VarCurr (v461(VarCurr,bitIndex0)<->v372(VarCurr,bitIndex0)).
% 299.31/297.51  all VarCurr (v461(VarCurr,bitIndex1)<->v350(VarCurr)).
% 299.31/297.51  all VarCurr (v458(VarCurr)<-> (v314(VarCurr,bitIndex1)<->$F)& (v314(VarCurr,bitIndex0)<->$T)).
% 299.31/297.51  all VarCurr (-v456(VarCurr)-> (all B (range_1_0(B)-> (v455(VarCurr,B)<->b01(B))))).
% 299.31/297.51  all VarCurr (v456(VarCurr)-> (all B (range_1_0(B)-> (v455(VarCurr,B)<->$F)))).
% 299.31/297.51  all VarCurr (v457(VarCurr)<-> (v320(VarCurr)<->$T)).
% 299.31/297.51  all VarCurr (v456(VarCurr)<-> (v320(VarCurr)<->$F)).
% 299.31/297.51  all VarCurr (v454(VarCurr)<-> (v314(VarCurr,bitIndex1)<->$F)& (v314(VarCurr,bitIndex0)<->$F)).
% 299.31/297.51  all VarCurr (v442(VarCurr)<->v444(VarCurr)).
% 299.31/297.51  all VarCurr (v444(VarCurr)<->v446(VarCurr)).
% 299.31/297.51  all VarCurr (v446(VarCurr)<->v222(VarCurr,bitIndex0)&v222(VarCurr,bitIndex1)).
% 299.31/297.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v426(VarNext)-> (all B (range_2_0(B)-> (v399(VarNext,B)<->v399(VarCurr,B)))))).
% 299.31/297.51  all VarNext (v426(VarNext)-> (all B (range_2_0(B)-> (v399(VarNext,B)<->v436(VarNext,B))))).
% 299.31/297.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_2_0(B)-> (v436(VarNext,B)<->v434(VarCurr,B))))).
% 299.31/297.51  all VarCurr (-v437(VarCurr)-> (all B (range_2_0(B)-> (v434(VarCurr,B)<->v401(VarCurr,B))))).
% 299.31/297.51  all VarCurr (v437(VarCurr)-> (all B (range_2_0(B)-> (v434(VarCurr,B)<->$F)))).
% 299.31/297.51  all VarCurr (-v437(VarCurr)<->v316(VarCurr)).
% 299.31/297.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v426(VarNext)<->v427(VarNext))).
% 299.31/297.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v427(VarNext)<->v428(VarNext)&v423(VarNext))).
% 299.31/297.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v428(VarNext)<->v430(VarNext))).
% 299.31/297.51  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v430(VarNext)<->v423(VarCurr))).
% 299.31/297.51  all VarCurr (v423(VarCurr)<->v272(VarCurr)).
% 299.31/297.51  all VarCurr (-v404(VarCurr)& -v407(VarCurr)-> (all B (range_2_0(B)-> (v401(VarCurr,B)<->$F)))).
% 299.35/297.52  all VarCurr (v407(VarCurr)-> (all B (range_2_0(B)-> (v401(VarCurr,B)<->v408(VarCurr,B))))).
% 299.35/297.52  all VarCurr (v404(VarCurr)-> (all B (range_2_0(B)-> (v401(VarCurr,B)<->$F)))).
% 299.35/297.52  all VarCurr (v421(VarCurr)<-> (v314(VarCurr,bitIndex1)<->$T)& (v314(VarCurr,bitIndex0)<->$T)).
% 299.35/297.52  b11(bitIndex1).
% 299.35/297.52  b11(bitIndex0).
% 299.35/297.52  all VarCurr (v408(VarCurr,bitIndex0)<->v418(VarCurr)).
% 299.35/297.52  all VarCurr (v408(VarCurr,bitIndex1)<->v416(VarCurr)).
% 299.35/297.52  all VarCurr (v408(VarCurr,bitIndex2)<->v410(VarCurr)).
% 299.35/297.52  all VarCurr (v416(VarCurr)<->v417(VarCurr)&v420(VarCurr)).
% 299.35/297.52  all VarCurr (v420(VarCurr)<->v399(VarCurr,bitIndex0)|v399(VarCurr,bitIndex1)).
% 299.35/297.52  all VarCurr (v417(VarCurr)<->v418(VarCurr)|v419(VarCurr)).
% 299.35/297.52  all VarCurr (-v419(VarCurr)<->v399(VarCurr,bitIndex1)).
% 299.35/297.52  all VarCurr (-v418(VarCurr)<->v399(VarCurr,bitIndex0)).
% 299.35/297.52  all VarCurr (v410(VarCurr)<->v411(VarCurr)&v415(VarCurr)).
% 299.35/297.52  all VarCurr (v415(VarCurr)<->v413(VarCurr)|v399(VarCurr,bitIndex2)).
% 299.35/297.52  all VarCurr (v411(VarCurr)<->v412(VarCurr)|v414(VarCurr)).
% 299.35/297.52  all VarCurr (-v414(VarCurr)<->v399(VarCurr,bitIndex2)).
% 299.35/297.52  all VarCurr (-v412(VarCurr)<->v413(VarCurr)).
% 299.35/297.52  all VarCurr (v413(VarCurr)<->v399(VarCurr,bitIndex0)&v399(VarCurr,bitIndex1)).
% 299.35/297.52  all B (range_2_0(B)-> (v399(constB0,B)<->$F)).
% 299.35/297.52  all VarCurr (v407(VarCurr)<-> (v314(VarCurr,bitIndex1)<->$T)& (v314(VarCurr,bitIndex0)<->$F)).
% 299.35/297.52  b10(bitIndex1).
% 299.35/297.52  -b10(bitIndex0).
% 299.35/297.52  all VarCurr (v404(VarCurr)<->v405(VarCurr)|v406(VarCurr)).
% 299.35/297.52  all VarCurr (v406(VarCurr)<-> (v314(VarCurr,bitIndex1)<->$F)& (v314(VarCurr,bitIndex0)<->$T)).
% 299.35/297.52  -b01(bitIndex1).
% 299.35/297.52  b01(bitIndex0).
% 299.35/297.52  all VarCurr (v405(VarCurr)<-> (v314(VarCurr,bitIndex1)<->$F)& (v314(VarCurr,bitIndex0)<->$F)).
% 299.35/297.52  all B (range_1_0(B)-> (v314(constB0,B)<->$F)).
% 299.35/297.52  all VarCurr (v372(VarCurr,bitIndex0)<->v395(VarCurr,bitIndex0)).
% 299.35/297.52  all VarCurr (-v350(VarCurr)-> (all B (range_2_0(B)-> (v395(VarCurr,B)<->$F)))).
% 299.35/297.52  all VarCurr (v350(VarCurr)-> (all B (range_2_0(B)-> (v395(VarCurr,B)<->v374(VarCurr,B))))).
% 299.35/297.52  all VarCurr (v374(VarCurr,bitIndex0)<->v376(VarCurr,bitIndex0)).
% 299.35/297.52  all VarCurr (v376(VarCurr,bitIndex0)<->v378(VarCurr,bitIndex0)).
% 299.35/297.52  all VarCurr (v378(VarCurr,bitIndex0)<->v380(VarCurr,bitIndex0)).
% 299.35/297.52  all VarCurr (v380(VarCurr,bitIndex0)<->v382(VarCurr,bitIndex139)).
% 299.35/297.52  all VarCurr (v382(VarCurr,bitIndex139)<->v384(VarCurr,bitIndex139)).
% 299.35/297.52  all VarCurr (v384(VarCurr,bitIndex139)<->v386(VarCurr,bitIndex139)).
% 299.35/297.52  all VarCurr (v386(VarCurr,bitIndex139)<->v388(VarCurr,bitIndex139)).
% 299.35/297.52  all VarCurr (v388(VarCurr,bitIndex139)<->v390(VarCurr,bitIndex139)).
% 299.35/297.52  all VarCurr (v390(VarCurr,bitIndex139)<->v392(VarCurr)).
% 299.35/297.52  all VarCurr (v392(VarCurr)<->v394(VarCurr)).
% 299.35/297.52  v394(constB0)<->$F.
% 299.35/297.52  all VarCurr (v350(VarCurr)<->v352(VarCurr)).
% 299.35/297.52  all VarCurr (v352(VarCurr)<->v354(VarCurr)).
% 299.35/297.52  all VarCurr (v354(VarCurr)<->v356(VarCurr)).
% 299.35/297.52  all VarCurr (v356(VarCurr)<->v358(VarCurr)).
% 299.35/297.52  all VarCurr (v358(VarCurr)<->v360(VarCurr)).
% 299.35/297.52  all VarCurr (v360(VarCurr)<->v362(VarCurr)).
% 299.35/297.52  all VarCurr (v362(VarCurr)<->v364(VarCurr)).
% 299.35/297.52  all VarCurr (v364(VarCurr)<->v366(VarCurr)).
% 299.35/297.52  all VarCurr (v366(VarCurr)<->v368(VarCurr)).
% 299.35/297.52  all VarCurr (v368(VarCurr)<->v370(VarCurr)).
% 299.35/297.52  v370(constB0)<->$F.
% 299.35/297.52  all VarCurr (v320(VarCurr)<->v322(VarCurr)).
% 299.35/297.52  all VarCurr (v322(VarCurr)<->v324(VarCurr)).
% 299.35/297.52  all VarCurr (-v347(VarCurr)-> (v324(VarCurr)<->$F)).
% 299.35/297.52  all VarCurr (v347(VarCurr)-> (v324(VarCurr)<->v327(VarCurr))).
% 299.35/297.52  all VarCurr (v347(VarCurr)<-> (v237(VarCurr,bitIndex2)<->$F)& (v237(VarCurr,bitIndex1)<->$T)& (v237(VarCurr,bitIndex0)<->$F)).
% 299.35/297.52  -b010(bitIndex2).
% 299.35/297.52  b010(bitIndex1).
% 299.35/297.52  -b010(bitIndex0).
% 299.35/297.52  all VarCurr (v327(VarCurr)<->v329(VarCurr)).
% 299.35/297.52  all VarCurr (v329(VarCurr)<->v331(VarCurr)).
% 299.35/297.52  all VarCurr (v331(VarCurr)<->v333(VarCurr)).
% 299.35/297.52  all VarCurr (v333(VarCurr)<->v335(VarCurr)).
% 299.35/297.52  all VarCurr (v335(VarCurr)<->v337(VarCurr)).
% 299.35/297.52  all VarCurr (v337(VarCurr)<->v339(VarCurr)).
% 299.35/297.52  all VarCurr (v339(VarCurr)<->v341(VarCurr)).
% 299.35/297.52  all VarCurr (v341(VarCurr)<->v343(VarCurr)).
% 299.35/297.52  all VarCurr (v343(VarCurr)<->v345(VarCurr)).
% 299.35/297.52  v345(constB0)<->$F.
% 299.35/297.52  all VarCurr (v316(VarCurr)<->v226(VarCurr)).
% 299.35/297.52  all VarCurr (v214(VarCurr)<->v216(VarCurr)).
% 299.35/297.52  all VarCurr (v216(VarCurr)<->v218(VarCurr)).
% 299.35/297.52  all VarCurr (v218(VarCurr)<->v220(VarCurr)).
% 299.35/297.52  all VarCurr (v220(VarCurr)<->v222(VarCurr,bitIndex0)|v222(VarCurr,bitIndex1)).
% 299.35/297.52  Cputime limit exceeded (core dumped)
%------------------------------------------------------------------------------