TSTP Solution File: HWV117+1 by Otter---3.3

View Problem - Process Solution

%------------------------------------------------------------------------------
% File     : Otter---3.3
% Problem  : HWV117+1 : TPTP v8.1.0. Released v6.1.0.
% Transfm  : none
% Format   : tptp:raw
% Command  : otter-tptp-script %s

% Computer : n029.cluster.edu
% Model    : x86_64 x86_64
% CPU      : Intel(R) Xeon(R) CPU E5-2620 v4 @ 2.10GHz
% Memory   : 8042.1875MB
% OS       : Linux 3.10.0-693.el7.x86_64
% CPULimit : 300s
% WCLimit  : 300s
% DateTime : Wed Jul 27 12:58:46 EDT 2022

% Result   : Unknown 103.77s 103.11s
% Output   : None 
% Verified : 
% SZS Type : -

% Comments : 
%------------------------------------------------------------------------------
%----No solution output by system
%------------------------------------------------------------------------------
%----ORIGINAL SYSTEM OUTPUT
% 0.03/0.17  % Problem  : HWV117+1 : TPTP v8.1.0. Released v6.1.0.
% 0.03/0.18  % Command  : otter-tptp-script %s
% 0.13/0.38  % Computer : n029.cluster.edu
% 0.13/0.38  % Model    : x86_64 x86_64
% 0.13/0.38  % CPU      : Intel(R) Xeon(R) CPU E5-2620 v4 @ 2.10GHz
% 0.13/0.38  % Memory   : 8042.1875MB
% 0.13/0.38  % OS       : Linux 3.10.0-693.el7.x86_64
% 0.13/0.38  % CPULimit : 300
% 0.13/0.38  % WCLimit  : 300
% 0.13/0.38  % DateTime : Wed Jul 27 06:49:59 EDT 2022
% 0.13/0.38  % CPUTime  : 
% 78.16/77.55  ----- Otter 3.3f, August 2004 -----
% 78.16/77.55  The process was started by sandbox on n029.cluster.edu,
% 78.16/77.55  Wed Jul 27 06:49:59 2022
% 78.16/77.55  The command was "./otter".  The process ID is 21553.
% 78.16/77.55  
% 78.16/77.55  set(prolog_style_variables).
% 78.16/77.55  set(auto).
% 78.16/77.55     dependent: set(auto1).
% 78.16/77.55     dependent: set(process_input).
% 78.16/77.55     dependent: clear(print_kept).
% 78.16/77.55     dependent: clear(print_new_demod).
% 78.16/77.55     dependent: clear(print_back_demod).
% 78.16/77.55     dependent: clear(print_back_sub).
% 78.16/77.55     dependent: set(control_memory).
% 78.16/77.55     dependent: assign(max_mem, 12000).
% 78.16/77.55     dependent: assign(pick_given_ratio, 4).
% 78.16/77.55     dependent: assign(stats_level, 1).
% 78.16/77.55     dependent: assign(max_seconds, 10800).
% 78.16/77.55  clear(print_given).
% 78.16/77.55  
% 78.16/77.55  formula_list(usable).
% 78.16/77.55  all A (A=A).
% 78.16/77.55  nextState(constB8,constB9).
% 78.16/77.55  nextState(constB7,constB8).
% 78.16/77.55  nextState(constB6,constB7).
% 78.16/77.55  nextState(constB5,constB6).
% 78.16/77.55  nextState(constB4,constB5).
% 78.16/77.55  nextState(constB3,constB4).
% 78.16/77.55  nextState(constB2,constB3).
% 78.16/77.55  nextState(constB1,constB2).
% 78.16/77.55  nextState(constB0,constB1).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)->reachableState(VarCurr)&reachableState(VarNext)).
% 78.16/77.55  all VarState (reachableState(VarState)->constB0=VarState|constB1=VarState|constB2=VarState|constB3=VarState|constB4=VarState|constB5=VarState|constB6=VarState|constB7=VarState|constB8=VarState|constB9=VarState|constB10=VarState|constB11=VarState|constB12=VarState|constB13=VarState|constB14=VarState|constB15=VarState|constB16=VarState|constB17=VarState|constB18=VarState|constB19=VarState|constB20=VarState).
% 78.16/77.55  reachableState(constB20).
% 78.16/77.55  reachableState(constB19).
% 78.16/77.55  reachableState(constB18).
% 78.16/77.55  reachableState(constB17).
% 78.16/77.55  reachableState(constB16).
% 78.16/77.55  reachableState(constB15).
% 78.16/77.55  reachableState(constB14).
% 78.16/77.55  reachableState(constB13).
% 78.16/77.55  reachableState(constB12).
% 78.16/77.55  reachableState(constB11).
% 78.16/77.55  reachableState(constB10).
% 78.16/77.55  reachableState(constB9).
% 78.16/77.55  reachableState(constB8).
% 78.16/77.55  reachableState(constB7).
% 78.16/77.55  reachableState(constB6).
% 78.16/77.55  reachableState(constB5).
% 78.16/77.55  reachableState(constB4).
% 78.16/77.55  reachableState(constB3).
% 78.16/77.55  reachableState(constB2).
% 78.16/77.55  reachableState(constB1).
% 78.16/77.55  reachableState(constB0).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1(VarCurr)<-> -v1(VarNext))).
% 78.16/77.55  -v1(constB0).
% 78.16/77.55  -(all VarCurr (reachableState(VarCurr)->v4(VarCurr))).
% 78.16/77.55  all VarCurr (v4(VarCurr)<->v2902(VarCurr)|v2916(VarCurr)).
% 78.16/77.55  all VarCurr (v2916(VarCurr)<->v2917(VarCurr)|v2918(VarCurr)).
% 78.16/77.55  all VarCurr (v2918(VarCurr)<->v2903(VarCurr)&v2919(VarCurr)).
% 78.16/77.55  all VarCurr (v2919(VarCurr)<->v2920(VarCurr)|v2921(VarCurr)).
% 78.16/77.55  all VarCurr (v2921(VarCurr)<->v2906(VarCurr)&v2922(VarCurr)).
% 78.16/77.55  all VarCurr (v2922(VarCurr)<->v2923(VarCurr)|v2924(VarCurr)).
% 78.16/77.55  all VarCurr (v2924(VarCurr)<->v2909(VarCurr)&v2925(VarCurr)).
% 78.16/77.55  all VarCurr (v2925(VarCurr)<->v2926(VarCurr)|v2927(VarCurr)).
% 78.16/77.55  all VarCurr (v2927(VarCurr)<->v2913(VarCurr)&v2914(VarCurr)).
% 78.16/77.55  all VarCurr (v2926(VarCurr)<->v2912(VarCurr)&v2915(VarCurr)).
% 78.16/77.55  all VarCurr (v2923(VarCurr)<->v2910(VarCurr)&v2911(VarCurr)).
% 78.16/77.55  all VarCurr (v2920(VarCurr)<->v2907(VarCurr)&v2908(VarCurr)).
% 78.16/77.55  all VarCurr (v2917(VarCurr)<->v2904(VarCurr)&v2905(VarCurr)).
% 78.16/77.55  all VarCurr (v2902(VarCurr)<->v2903(VarCurr)&v2905(VarCurr)).
% 78.16/77.55  all VarCurr (v2905(VarCurr)<->v2906(VarCurr)&v2908(VarCurr)).
% 78.16/77.55  all VarCurr (v2908(VarCurr)<->v2909(VarCurr)&v2911(VarCurr)).
% 78.16/77.55  all VarCurr (v2911(VarCurr)<->v2912(VarCurr)&v2914(VarCurr)).
% 78.16/77.55  all VarCurr (-v2914(VarCurr)<->v2915(VarCurr)).
% 78.16/77.55  all VarCurr (v2915(VarCurr)<-> ($T<->v7(VarCurr,bitIndex3))).
% 78.16/77.55  all VarCurr (-v2912(VarCurr)<->v2913(VarCurr)).
% 78.16/77.55  all VarCurr (v2913(VarCurr)<-> ($T<->v7(VarCurr,bitIndex4))).
% 78.16/77.55  all VarCurr (-v2909(VarCurr)<->v2910(VarCurr)).
% 78.16/77.55  all VarCurr (v2910(VarCurr)<-> ($T<->v7(VarCurr,bitIndex2))).
% 78.16/77.55  all VarCurr (-v2906(VarCurr)<->v2907(VarCurr)).
% 78.16/77.55  all VarCurr (v2907(VarCurr)<-> ($T<->v7(VarCurr,bitIndex1))).
% 78.16/77.55  all VarCurr (-v2903(VarCurr)<->v2904(VarCurr)).
% 78.16/77.55  all VarCurr (v2904(VarCurr)<-> ($T<->v7(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (v7(VarCurr,bitIndex0)<->v9(VarCurr,bitIndex0)).
% 78.16/77.55  all VarCurr (v9(VarCurr,bitIndex0)<->v11(VarCurr,bitIndex0)).
% 78.16/77.55  all VarCurr (v11(VarCurr,bitIndex0)<->v13(VarCurr,bitIndex0)).
% 78.16/77.55  all VarNext (v13(VarNext,bitIndex0)<->v2893(VarNext,bitIndex0)).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2894(VarNext)-> (all B (range_4_0(B)-> (v2893(VarNext,B)<->v13(VarCurr,B)))))).
% 78.16/77.55  all VarNext (v2894(VarNext)-> (all B (range_4_0(B)-> (v2893(VarNext,B)<->v2580(VarNext,B))))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2894(VarNext)<->v2895(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2895(VarNext)<->v2897(VarNext)&v355(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2897(VarNext)<->v362(VarNext))).
% 78.16/77.55  all VarNext (v20(VarNext,bitIndex0)<->v2885(VarNext,bitIndex0)).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2886(VarNext)-> (all B (range_4_0(B)-> (v2885(VarNext,B)<->v20(VarCurr,B)))))).
% 78.16/77.55  all VarNext (v2886(VarNext)-> (all B (range_4_0(B)-> (v2885(VarNext,B)<->v2566(VarNext,B))))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2886(VarNext)<->v2887(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2887(VarNext)<->v2889(VarNext)&v355(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2889(VarNext)<->v362(VarNext))).
% 78.16/77.55  all VarCurr (-v2850(VarCurr)-> (v22(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2850(VarCurr)-> (v22(VarCurr,bitIndex0)<->v2883(VarCurr))).
% 78.16/77.55  all VarCurr (-v2854(VarCurr)& -v2855(VarCurr)& -v2861(VarCurr)& -v2867(VarCurr)&v2875(VarCurr)-> (v2883(VarCurr)<->v1139(VarCurr,bitIndex5))).
% 78.16/77.55  all VarCurr (-v2854(VarCurr)& -v2855(VarCurr)& -v2861(VarCurr)&v2867(VarCurr)-> (v2883(VarCurr)<->v1124(VarCurr,bitIndex5))).
% 78.16/77.55  all VarCurr (-v2854(VarCurr)& -v2855(VarCurr)&v2861(VarCurr)-> (v2883(VarCurr)<->v1109(VarCurr,bitIndex5))).
% 78.16/77.55  all VarCurr (-v2854(VarCurr)&v2855(VarCurr)-> (v2883(VarCurr)<->$T)).
% 78.16/77.55  all VarCurr (v2854(VarCurr)-> (v2883(VarCurr)<->v1094(VarCurr,bitIndex5))).
% 78.16/77.55  all VarCurr (v2850(VarCurr)<->v2851(VarCurr)|v2875(VarCurr)).
% 78.16/77.55  all VarCurr (v2875(VarCurr)<->v2876(VarCurr)&v2880(VarCurr)).
% 78.16/77.55  all VarCurr (v2880(VarCurr)<->v2881(VarCurr)&v2882(VarCurr)).
% 78.16/77.55  all VarCurr (-v2882(VarCurr)<->v2874(VarCurr)).
% 78.16/77.55  all VarCurr (v2881(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.16/77.55  all VarCurr (-v2876(VarCurr)<->v2877(VarCurr)).
% 78.16/77.55  all VarCurr (v2877(VarCurr)<->v2878(VarCurr)|v1187(VarCurr)).
% 78.16/77.55  all VarCurr (v2878(VarCurr)<->v2879(VarCurr)|v1174(VarCurr)).
% 78.16/77.55  all VarCurr (v2879(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.55  all VarCurr (v2851(VarCurr)<->v2852(VarCurr)|v2867(VarCurr)).
% 78.16/77.55  all VarCurr (v2867(VarCurr)<->v2868(VarCurr)&v2871(VarCurr)).
% 78.16/77.55  all VarCurr (v2871(VarCurr)<->v2872(VarCurr)&v2873(VarCurr)).
% 78.16/77.55  all VarCurr (-v2873(VarCurr)<->v2874(VarCurr)).
% 78.16/77.55  all VarCurr (v2874(VarCurr)<->v1177(VarCurr)|v1190(VarCurr)).
% 78.16/77.55  all VarCurr (v2872(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.16/77.55  all VarCurr (-v2868(VarCurr)<->v2869(VarCurr)).
% 78.16/77.55  all VarCurr (v2869(VarCurr)<->v2870(VarCurr)|v1174(VarCurr)).
% 78.16/77.55  all VarCurr (v2870(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.55  all VarCurr (v2852(VarCurr)<->v2853(VarCurr)|v2861(VarCurr)).
% 78.16/77.55  all VarCurr (v2861(VarCurr)<->v2862(VarCurr)&v2864(VarCurr)).
% 78.16/77.55  all VarCurr (v2864(VarCurr)<->v2865(VarCurr)&v2866(VarCurr)).
% 78.16/77.55  all VarCurr (-v2866(VarCurr)<->v1177(VarCurr)).
% 78.16/77.55  all VarCurr (v2865(VarCurr)<->v24(VarCurr)&v1174(VarCurr)).
% 78.16/77.55  all VarCurr (-v2862(VarCurr)<->v2863(VarCurr)).
% 78.16/77.55  all VarCurr (v2863(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.55  all VarCurr (v2853(VarCurr)<->v2854(VarCurr)|v2855(VarCurr)).
% 78.16/77.55  all VarCurr (v2855(VarCurr)<->v2856(VarCurr)&v2857(VarCurr)).
% 78.16/77.55  all VarCurr (v2857(VarCurr)<->v2858(VarCurr)&v2860(VarCurr)).
% 78.16/77.55  all VarCurr (-v2860(VarCurr)<->v1177(VarCurr)).
% 78.16/77.55  all VarCurr (v2858(VarCurr)<->v2859(VarCurr)&v1178(VarCurr)).
% 78.16/77.55  all VarCurr (v2859(VarCurr)<->v2584(VarCurr)&v395(VarCurr)).
% 78.16/77.55  all VarCurr (-v2856(VarCurr)<->v1158(VarCurr)).
% 78.16/77.55  all VarCurr (v2854(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.16/77.55  all VarCurr (v1139(VarCurr,bitIndex5)<->v1140(VarCurr,bitIndex5)).
% 78.16/77.55  all VarCurr (-v2848(VarCurr)-> (v1147(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2848(VarCurr)-> (v1147(VarCurr,bitIndex0)<->v1146(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (-v2848(VarCurr)<->v1346(VarCurr)).
% 78.16/77.55  all VarCurr (-v395(VarCurr)-> (v1146(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v395(VarCurr)-> (v1146(VarCurr,bitIndex0)<->v2500(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (-v2586(VarCurr)-> (v2500(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2586(VarCurr)-> (v2500(VarCurr,bitIndex0)<->$T)).
% 78.16/77.55  all VarCurr (v1124(VarCurr,bitIndex5)<->v1125(VarCurr,bitIndex5)).
% 78.16/77.55  all VarCurr (-v2844(VarCurr)-> (v1132(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2844(VarCurr)-> (v1132(VarCurr,bitIndex0)<->v1131(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (-v2844(VarCurr)<->v1346(VarCurr)).
% 78.16/77.55  all VarCurr (-v395(VarCurr)-> (v1131(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v395(VarCurr)-> (v1131(VarCurr,bitIndex0)<->v2493(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (-v2586(VarCurr)-> (v2493(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2586(VarCurr)-> (v2493(VarCurr,bitIndex0)<->$T)).
% 78.16/77.55  all VarCurr (v1109(VarCurr,bitIndex5)<->v1110(VarCurr,bitIndex5)).
% 78.16/77.55  all VarCurr (-v2840(VarCurr)-> (v1117(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2840(VarCurr)-> (v1117(VarCurr,bitIndex0)<->v1116(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (-v2840(VarCurr)<->v1346(VarCurr)).
% 78.16/77.55  all VarCurr (-v395(VarCurr)-> (v1116(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v395(VarCurr)-> (v1116(VarCurr,bitIndex0)<->v2486(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (-v2586(VarCurr)-> (v2486(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2586(VarCurr)-> (v2486(VarCurr,bitIndex0)<->$T)).
% 78.16/77.55  all VarCurr (v1094(VarCurr,bitIndex5)<->v1095(VarCurr,bitIndex5)).
% 78.16/77.55  all VarCurr (-v2836(VarCurr)-> (v1102(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2836(VarCurr)-> (v1102(VarCurr,bitIndex0)<->v1101(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (-v2836(VarCurr)<->v1346(VarCurr)).
% 78.16/77.55  all VarCurr (-v395(VarCurr)-> (v1101(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v395(VarCurr)-> (v1101(VarCurr,bitIndex0)<->v2479(VarCurr,bitIndex0))).
% 78.16/77.55  all VarCurr (-v2586(VarCurr)-> (v2479(VarCurr,bitIndex0)<->$F)).
% 78.16/77.55  all VarCurr (v2586(VarCurr)-> (v2479(VarCurr,bitIndex0)<->$T)).
% 78.16/77.55  all VarCurr (v7(VarCurr,bitIndex1)<->v9(VarCurr,bitIndex1)).
% 78.16/77.55  all VarCurr (v9(VarCurr,bitIndex1)<->v11(VarCurr,bitIndex1)).
% 78.16/77.55  all VarCurr (v11(VarCurr,bitIndex1)<->v13(VarCurr,bitIndex1)).
% 78.16/77.55  all VarNext (v13(VarNext,bitIndex1)<->v2826(VarNext,bitIndex1)).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2827(VarNext)-> (all B (range_4_0(B)-> (v2826(VarNext,B)<->v13(VarCurr,B)))))).
% 78.16/77.55  all VarNext (v2827(VarNext)-> (all B (range_4_0(B)-> (v2826(VarNext,B)<->v2580(VarNext,B))))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2827(VarNext)<->v2828(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2828(VarNext)<->v2830(VarNext)&v355(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2830(VarNext)<->v362(VarNext))).
% 78.16/77.55  all VarNext (v20(VarNext,bitIndex1)<->v2818(VarNext,bitIndex1)).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2819(VarNext)-> (all B (range_4_0(B)-> (v2818(VarNext,B)<->v20(VarCurr,B)))))).
% 78.16/77.55  all VarNext (v2819(VarNext)-> (all B (range_4_0(B)-> (v2818(VarNext,B)<->v2566(VarNext,B))))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2819(VarNext)<->v2820(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2820(VarNext)<->v2822(VarNext)&v355(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2822(VarNext)<->v362(VarNext))).
% 78.16/77.55  all VarCurr (-v2763(VarCurr)-> (v22(VarCurr,bitIndex1)<->$F)).
% 78.16/77.55  all VarCurr (v2763(VarCurr)-> (v22(VarCurr,bitIndex1)<->v2815(VarCurr))).
% 78.16/77.55  all VarCurr (-v2768(VarCurr)& -v2769(VarCurr)& -v2781(VarCurr)& -v2789(VarCurr)& -v2796(VarCurr)&v2804(VarCurr)-> (v2815(VarCurr)<->$T)).
% 78.16/77.55  all VarCurr (-v2768(VarCurr)& -v2769(VarCurr)& -v2781(VarCurr)& -v2789(VarCurr)&v2796(VarCurr)-> (v2815(VarCurr)<->v1139(VarCurr,bitIndex6))).
% 78.16/77.55  all VarCurr (-v2768(VarCurr)& -v2769(VarCurr)& -v2781(VarCurr)&v2789(VarCurr)-> (v2815(VarCurr)<->v1124(VarCurr,bitIndex6))).
% 78.16/77.55  all VarCurr (-v2768(VarCurr)& -v2769(VarCurr)&v2781(VarCurr)-> (v2815(VarCurr)<->$T)).
% 78.16/77.55  all VarCurr (-v2768(VarCurr)&v2769(VarCurr)-> (v2815(VarCurr)<->v2816(VarCurr))).
% 78.16/77.55  all VarCurr (v2768(VarCurr)-> (v2815(VarCurr)<->v1094(VarCurr,bitIndex6))).
% 78.16/77.55  all VarCurr (-v24(VarCurr)-> (v2816(VarCurr)<->$T)).
% 78.16/77.55  all VarCurr (v24(VarCurr)-> (v2816(VarCurr)<->v1109(VarCurr,bitIndex6))).
% 78.16/77.55  all VarCurr (v2763(VarCurr)<->v2764(VarCurr)|v2804(VarCurr)).
% 78.16/77.55  all VarCurr (v2804(VarCurr)<->v2805(VarCurr)&v2810(VarCurr)).
% 78.16/77.55  all VarCurr (v2810(VarCurr)<->v2811(VarCurr)&v2814(VarCurr)).
% 78.16/77.55  all VarCurr (-v2814(VarCurr)<->v2780(VarCurr)).
% 78.16/77.55  all VarCurr (v2811(VarCurr)<->v2812(VarCurr)&v1266(VarCurr)).
% 78.16/77.55  all VarCurr (v2812(VarCurr)<->v446(VarCurr)&v2813(VarCurr)).
% 78.16/77.55  all VarCurr (-v2813(VarCurr)<->v1088(VarCurr)).
% 78.16/77.55  all VarCurr (-v2805(VarCurr)<->v2806(VarCurr)).
% 78.16/77.55  all VarCurr (v2806(VarCurr)<->v2807(VarCurr)|v1200(VarCurr)).
% 78.16/77.55  all VarCurr (v2807(VarCurr)<->v2808(VarCurr)|v1187(VarCurr)).
% 78.16/77.55  all VarCurr (v2808(VarCurr)<->v2809(VarCurr)|v1190(VarCurr)).
% 78.16/77.55  all VarCurr (v2809(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.16/77.55  all VarCurr (v2764(VarCurr)<->v2765(VarCurr)|v2796(VarCurr)).
% 78.16/77.55  all VarCurr (v2796(VarCurr)<->v2797(VarCurr)&v2801(VarCurr)).
% 78.16/77.55  all VarCurr (v2801(VarCurr)<->v2802(VarCurr)&v2803(VarCurr)).
% 78.16/77.55  all VarCurr (-v2803(VarCurr)<->v2780(VarCurr)).
% 78.16/77.55  all VarCurr (v2802(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.16/77.55  all VarCurr (-v2797(VarCurr)<->v2798(VarCurr)).
% 78.16/77.55  all VarCurr (v2798(VarCurr)<->v2799(VarCurr)|v1187(VarCurr)).
% 78.16/77.55  all VarCurr (v2799(VarCurr)<->v2800(VarCurr)|v1190(VarCurr)).
% 78.16/77.55  all VarCurr (v2800(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.16/77.55  all VarCurr (v2765(VarCurr)<->v2766(VarCurr)|v2789(VarCurr)).
% 78.16/77.55  all VarCurr (v2789(VarCurr)<->v2790(VarCurr)&v2793(VarCurr)).
% 78.16/77.55  all VarCurr (v2793(VarCurr)<->v2794(VarCurr)&v2795(VarCurr)).
% 78.16/77.55  all VarCurr (-v2795(VarCurr)<->v2780(VarCurr)).
% 78.16/77.55  all VarCurr (v2794(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.16/77.55  all VarCurr (-v2790(VarCurr)<->v2791(VarCurr)).
% 78.16/77.55  all VarCurr (v2791(VarCurr)<->v2792(VarCurr)|v1190(VarCurr)).
% 78.16/77.55  all VarCurr (v2792(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.16/77.55  all VarCurr (v2766(VarCurr)<->v2767(VarCurr)|v2781(VarCurr)).
% 78.16/77.55  all VarCurr (v2781(VarCurr)<->v2782(VarCurr)&v2784(VarCurr)).
% 78.16/77.55  all VarCurr (v2784(VarCurr)<->v2785(VarCurr)&v2788(VarCurr)).
% 78.16/77.55  all VarCurr (-v2788(VarCurr)<->v2780(VarCurr)).
% 78.16/77.55  all VarCurr (v2785(VarCurr)<->v2786(VarCurr)&v1190(VarCurr)).
% 78.16/77.55  all VarCurr (v2786(VarCurr)<->v446(VarCurr)&v2787(VarCurr)).
% 78.16/77.55  all VarCurr (-v2787(VarCurr)<->v1241(VarCurr)).
% 78.16/77.55  all VarCurr (-v2782(VarCurr)<->v2783(VarCurr)).
% 78.16/77.55  all VarCurr (v2783(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.16/77.55  all VarCurr (v2767(VarCurr)<->v2768(VarCurr)|v2769(VarCurr)).
% 78.16/77.55  all VarCurr (v2769(VarCurr)<->v2770(VarCurr)&v2771(VarCurr)).
% 78.16/77.55  all VarCurr (v2771(VarCurr)<->v2772(VarCurr)&v2779(VarCurr)).
% 78.16/77.55  all VarCurr (-v2779(VarCurr)<->v2780(VarCurr)).
% 78.16/77.55  all VarCurr (v2780(VarCurr)<->v1177(VarCurr)|v1178(VarCurr)).
% 78.16/77.55  all VarCurr (v2772(VarCurr)<->v2773(VarCurr)&v1174(VarCurr)).
% 78.16/77.55  all VarCurr (v2773(VarCurr)<->v24(VarCurr)|v2774(VarCurr)).
% 78.16/77.55  all VarCurr (v2774(VarCurr)<->v2775(VarCurr)&v2778(VarCurr)).
% 78.16/77.55  all VarCurr (-v2778(VarCurr)<->v24(VarCurr)).
% 78.16/77.55  all VarCurr (v2775(VarCurr)<->v2776(VarCurr)&v2777(VarCurr)).
% 78.16/77.55  all VarCurr (-v2777(VarCurr)<->v1171(VarCurr)).
% 78.16/77.55  all VarCurr (v2776(VarCurr)<->v446(VarCurr)&v452(VarCurr)).
% 78.16/77.55  all VarCurr (-v2770(VarCurr)<->v1158(VarCurr)).
% 78.16/77.55  all VarCurr (v2768(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.16/77.55  all VarCurr (v1139(VarCurr,bitIndex6)<->v1140(VarCurr,bitIndex6)).
% 78.16/77.55  all VarCurr (v1124(VarCurr,bitIndex6)<->v1125(VarCurr,bitIndex6)).
% 78.16/77.55  all VarCurr (v1109(VarCurr,bitIndex6)<->v1110(VarCurr,bitIndex6)).
% 78.16/77.55  all VarCurr (v1094(VarCurr,bitIndex6)<->v1095(VarCurr,bitIndex6)).
% 78.16/77.55  all VarCurr (v7(VarCurr,bitIndex2)<->v9(VarCurr,bitIndex2)).
% 78.16/77.55  all VarCurr (v9(VarCurr,bitIndex2)<->v11(VarCurr,bitIndex2)).
% 78.16/77.55  all VarCurr (v11(VarCurr,bitIndex2)<->v13(VarCurr,bitIndex2)).
% 78.16/77.55  all VarNext (v13(VarNext,bitIndex2)<->v2755(VarNext,bitIndex2)).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2756(VarNext)-> (all B (range_4_0(B)-> (v2755(VarNext,B)<->v13(VarCurr,B)))))).
% 78.16/77.55  all VarNext (v2756(VarNext)-> (all B (range_4_0(B)-> (v2755(VarNext,B)<->v2580(VarNext,B))))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2756(VarNext)<->v2757(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2757(VarNext)<->v2759(VarNext)&v355(VarNext))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2759(VarNext)<->v362(VarNext))).
% 78.16/77.55  all VarNext (v20(VarNext,bitIndex2)<->v2747(VarNext,bitIndex2)).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2748(VarNext)-> (all B (range_4_0(B)-> (v2747(VarNext,B)<->v20(VarCurr,B)))))).
% 78.16/77.55  all VarNext (v2748(VarNext)-> (all B (range_4_0(B)-> (v2747(VarNext,B)<->v2566(VarNext,B))))).
% 78.16/77.55  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2748(VarNext)<->v2749(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2749(VarNext)<->v2751(VarNext)&v355(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2751(VarNext)<->v362(VarNext))).
% 78.16/77.56  all VarCurr (-v2689(VarCurr)-> (v22(VarCurr,bitIndex2)<->$F)).
% 78.16/77.56  all VarCurr (v2689(VarCurr)-> (v22(VarCurr,bitIndex2)<->v2744(VarCurr))).
% 78.16/77.56  all VarCurr (-v2694(VarCurr)& -v2695(VarCurr)& -v2708(VarCurr)& -v2717(VarCurr)& -v2724(VarCurr)&v2732(VarCurr)-> (v2744(VarCurr)<->$T)).
% 78.16/77.56  all VarCurr (-v2694(VarCurr)& -v2695(VarCurr)& -v2708(VarCurr)& -v2717(VarCurr)&v2724(VarCurr)-> (v2744(VarCurr)<->v1139(VarCurr,bitIndex7))).
% 78.16/77.56  all VarCurr (-v2694(VarCurr)& -v2695(VarCurr)& -v2708(VarCurr)&v2717(VarCurr)-> (v2744(VarCurr)<->v1124(VarCurr,bitIndex7))).
% 78.16/77.56  all VarCurr (-v2694(VarCurr)& -v2695(VarCurr)&v2708(VarCurr)-> (v2744(VarCurr)<->$T)).
% 78.16/77.56  all VarCurr (-v2694(VarCurr)&v2695(VarCurr)-> (v2744(VarCurr)<->v2745(VarCurr))).
% 78.16/77.56  all VarCurr (v2694(VarCurr)-> (v2744(VarCurr)<->v1094(VarCurr,bitIndex7))).
% 78.16/77.56  all VarCurr (-v24(VarCurr)-> (v2745(VarCurr)<->$T)).
% 78.16/77.56  all VarCurr (v24(VarCurr)-> (v2745(VarCurr)<->v1109(VarCurr,bitIndex7))).
% 78.16/77.56  all VarCurr (v2689(VarCurr)<->v2690(VarCurr)|v2732(VarCurr)).
% 78.16/77.56  all VarCurr (v2732(VarCurr)<->v2733(VarCurr)&v2738(VarCurr)).
% 78.16/77.56  all VarCurr (v2738(VarCurr)<->v2739(VarCurr)&v2743(VarCurr)).
% 78.16/77.56  all VarCurr (-v2743(VarCurr)<->v2707(VarCurr)).
% 78.16/77.56  all VarCurr (v2739(VarCurr)<->v2740(VarCurr)&v1266(VarCurr)).
% 78.16/77.56  all VarCurr (v2740(VarCurr)<->v2741(VarCurr)&v2742(VarCurr)).
% 78.16/77.56  all VarCurr (-v2742(VarCurr)<->v1088(VarCurr)).
% 78.16/77.56  all VarCurr (-v2741(VarCurr)<->v446(VarCurr)).
% 78.16/77.56  all VarCurr (-v2733(VarCurr)<->v2734(VarCurr)).
% 78.16/77.56  all VarCurr (v2734(VarCurr)<->v2735(VarCurr)|v1200(VarCurr)).
% 78.16/77.56  all VarCurr (v2735(VarCurr)<->v2736(VarCurr)|v1187(VarCurr)).
% 78.16/77.56  all VarCurr (v2736(VarCurr)<->v2737(VarCurr)|v1190(VarCurr)).
% 78.16/77.56  all VarCurr (v2737(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.16/77.56  all VarCurr (v2690(VarCurr)<->v2691(VarCurr)|v2724(VarCurr)).
% 78.16/77.56  all VarCurr (v2724(VarCurr)<->v2725(VarCurr)&v2729(VarCurr)).
% 78.16/77.56  all VarCurr (v2729(VarCurr)<->v2730(VarCurr)&v2731(VarCurr)).
% 78.16/77.56  all VarCurr (-v2731(VarCurr)<->v2707(VarCurr)).
% 78.16/77.56  all VarCurr (v2730(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.16/77.56  all VarCurr (-v2725(VarCurr)<->v2726(VarCurr)).
% 78.16/77.56  all VarCurr (v2726(VarCurr)<->v2727(VarCurr)|v1187(VarCurr)).
% 78.16/77.56  all VarCurr (v2727(VarCurr)<->v2728(VarCurr)|v1190(VarCurr)).
% 78.16/77.56  all VarCurr (v2728(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.16/77.56  all VarCurr (v2691(VarCurr)<->v2692(VarCurr)|v2717(VarCurr)).
% 78.16/77.56  all VarCurr (v2717(VarCurr)<->v2718(VarCurr)&v2721(VarCurr)).
% 78.16/77.56  all VarCurr (v2721(VarCurr)<->v2722(VarCurr)&v2723(VarCurr)).
% 78.16/77.56  all VarCurr (-v2723(VarCurr)<->v2707(VarCurr)).
% 78.16/77.56  all VarCurr (v2722(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.16/77.56  all VarCurr (-v2718(VarCurr)<->v2719(VarCurr)).
% 78.16/77.56  all VarCurr (v2719(VarCurr)<->v2720(VarCurr)|v1190(VarCurr)).
% 78.16/77.56  all VarCurr (v2720(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.16/77.56  all VarCurr (v2692(VarCurr)<->v2693(VarCurr)|v2708(VarCurr)).
% 78.16/77.56  all VarCurr (v2708(VarCurr)<->v2709(VarCurr)&v2711(VarCurr)).
% 78.16/77.56  all VarCurr (v2711(VarCurr)<->v2712(VarCurr)&v2716(VarCurr)).
% 78.16/77.56  all VarCurr (-v2716(VarCurr)<->v2707(VarCurr)).
% 78.16/77.56  all VarCurr (v2712(VarCurr)<->v2713(VarCurr)&v1190(VarCurr)).
% 78.16/77.56  all VarCurr (v2713(VarCurr)<->v2714(VarCurr)&v2715(VarCurr)).
% 78.16/77.56  all VarCurr (-v2715(VarCurr)<->v1241(VarCurr)).
% 78.16/77.56  all VarCurr (-v2714(VarCurr)<->v446(VarCurr)).
% 78.16/77.56  all VarCurr (-v2709(VarCurr)<->v2710(VarCurr)).
% 78.16/77.56  all VarCurr (v2710(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.16/77.56  all VarCurr (v2693(VarCurr)<->v2694(VarCurr)|v2695(VarCurr)).
% 78.16/77.56  all VarCurr (v2695(VarCurr)<->v2696(VarCurr)&v2697(VarCurr)).
% 78.16/77.56  all VarCurr (v2697(VarCurr)<->v2698(VarCurr)&v2706(VarCurr)).
% 78.16/77.56  all VarCurr (-v2706(VarCurr)<->v2707(VarCurr)).
% 78.16/77.56  all VarCurr (v2707(VarCurr)<->v1177(VarCurr)|v1178(VarCurr)).
% 78.16/77.56  all VarCurr (v2698(VarCurr)<->v2699(VarCurr)&v1174(VarCurr)).
% 78.16/77.56  all VarCurr (v2699(VarCurr)<->v24(VarCurr)|v2700(VarCurr)).
% 78.16/77.56  all VarCurr (v2700(VarCurr)<->v2701(VarCurr)&v2705(VarCurr)).
% 78.16/77.56  all VarCurr (-v2705(VarCurr)<->v24(VarCurr)).
% 78.16/77.56  all VarCurr (v2701(VarCurr)<->v2702(VarCurr)&v2704(VarCurr)).
% 78.16/77.56  all VarCurr (-v2704(VarCurr)<->v1171(VarCurr)).
% 78.16/77.56  all VarCurr (v2702(VarCurr)<->v2703(VarCurr)&v452(VarCurr)).
% 78.16/77.56  all VarCurr (-v2703(VarCurr)<->v446(VarCurr)).
% 78.16/77.56  all VarCurr (-v2696(VarCurr)<->v1158(VarCurr)).
% 78.16/77.56  all VarCurr (v2694(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.16/77.56  all VarCurr (v1139(VarCurr,bitIndex7)<->v1140(VarCurr,bitIndex7)).
% 78.16/77.56  all VarCurr (v1124(VarCurr,bitIndex7)<->v1125(VarCurr,bitIndex7)).
% 78.16/77.56  all VarCurr (v1109(VarCurr,bitIndex7)<->v1110(VarCurr,bitIndex7)).
% 78.16/77.56  all VarCurr (v1094(VarCurr,bitIndex7)<->v1095(VarCurr,bitIndex7)).
% 78.16/77.56  all VarCurr (v7(VarCurr,bitIndex3)<->v9(VarCurr,bitIndex3)).
% 78.16/77.56  all VarCurr (v9(VarCurr,bitIndex3)<->v11(VarCurr,bitIndex3)).
% 78.16/77.56  all VarCurr (v11(VarCurr,bitIndex3)<->v13(VarCurr,bitIndex3)).
% 78.16/77.56  all VarNext (v13(VarNext,bitIndex3)<->v2681(VarNext,bitIndex3)).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2682(VarNext)-> (all B (range_4_0(B)-> (v2681(VarNext,B)<->v13(VarCurr,B)))))).
% 78.16/77.56  all VarNext (v2682(VarNext)-> (all B (range_4_0(B)-> (v2681(VarNext,B)<->v2580(VarNext,B))))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2682(VarNext)<->v2683(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2683(VarNext)<->v2685(VarNext)&v355(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2685(VarNext)<->v362(VarNext))).
% 78.16/77.56  all VarNext (v20(VarNext,bitIndex3)<->v2673(VarNext,bitIndex3)).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2674(VarNext)-> (all B (range_4_0(B)-> (v2673(VarNext,B)<->v20(VarCurr,B)))))).
% 78.16/77.56  all VarNext (v2674(VarNext)-> (all B (range_4_0(B)-> (v2673(VarNext,B)<->v2566(VarNext,B))))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2674(VarNext)<->v2675(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2675(VarNext)<->v2677(VarNext)&v355(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2677(VarNext)<->v362(VarNext))).
% 78.16/77.56  all VarCurr (-v2637(VarCurr)-> (v22(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2637(VarCurr)-> (v22(VarCurr,bitIndex3)<->v2671(VarCurr))).
% 78.16/77.56  all VarCurr (-v2641(VarCurr)& -v2642(VarCurr)& -v2649(VarCurr)& -v2655(VarCurr)&v2663(VarCurr)-> (v2671(VarCurr)<->v1139(VarCurr,bitIndex8))).
% 78.16/77.56  all VarCurr (-v2641(VarCurr)& -v2642(VarCurr)& -v2649(VarCurr)&v2655(VarCurr)-> (v2671(VarCurr)<->v1124(VarCurr,bitIndex8))).
% 78.16/77.56  all VarCurr (-v2641(VarCurr)& -v2642(VarCurr)&v2649(VarCurr)-> (v2671(VarCurr)<->v1109(VarCurr,bitIndex8))).
% 78.16/77.56  all VarCurr (-v2641(VarCurr)&v2642(VarCurr)-> (v2671(VarCurr)<->$T)).
% 78.16/77.56  all VarCurr (v2641(VarCurr)-> (v2671(VarCurr)<->v1094(VarCurr,bitIndex8))).
% 78.16/77.56  all VarCurr (v2637(VarCurr)<->v2638(VarCurr)|v2663(VarCurr)).
% 78.16/77.56  all VarCurr (v2663(VarCurr)<->v2664(VarCurr)&v2668(VarCurr)).
% 78.16/77.56  all VarCurr (v2668(VarCurr)<->v2669(VarCurr)&v2670(VarCurr)).
% 78.16/77.56  all VarCurr (-v2670(VarCurr)<->v2662(VarCurr)).
% 78.16/77.56  all VarCurr (v2669(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.16/77.56  all VarCurr (-v2664(VarCurr)<->v2665(VarCurr)).
% 78.16/77.56  all VarCurr (v2665(VarCurr)<->v2666(VarCurr)|v1187(VarCurr)).
% 78.16/77.56  all VarCurr (v2666(VarCurr)<->v2667(VarCurr)|v1174(VarCurr)).
% 78.16/77.56  all VarCurr (v2667(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.56  all VarCurr (v2638(VarCurr)<->v2639(VarCurr)|v2655(VarCurr)).
% 78.16/77.56  all VarCurr (v2655(VarCurr)<->v2656(VarCurr)&v2659(VarCurr)).
% 78.16/77.56  all VarCurr (v2659(VarCurr)<->v2660(VarCurr)&v2661(VarCurr)).
% 78.16/77.56  all VarCurr (-v2661(VarCurr)<->v2662(VarCurr)).
% 78.16/77.56  all VarCurr (v2662(VarCurr)<->v1177(VarCurr)|v1190(VarCurr)).
% 78.16/77.56  all VarCurr (v2660(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.16/77.56  all VarCurr (-v2656(VarCurr)<->v2657(VarCurr)).
% 78.16/77.56  all VarCurr (v2657(VarCurr)<->v2658(VarCurr)|v1174(VarCurr)).
% 78.16/77.56  all VarCurr (v2658(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.56  all VarCurr (v2639(VarCurr)<->v2640(VarCurr)|v2649(VarCurr)).
% 78.16/77.56  all VarCurr (v2649(VarCurr)<->v2650(VarCurr)&v2652(VarCurr)).
% 78.16/77.56  all VarCurr (v2652(VarCurr)<->v2653(VarCurr)&v2654(VarCurr)).
% 78.16/77.56  all VarCurr (-v2654(VarCurr)<->v1177(VarCurr)).
% 78.16/77.56  all VarCurr (v2653(VarCurr)<->v24(VarCurr)&v1174(VarCurr)).
% 78.16/77.56  all VarCurr (-v2650(VarCurr)<->v2651(VarCurr)).
% 78.16/77.56  all VarCurr (v2651(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.56  all VarCurr (v2640(VarCurr)<->v2641(VarCurr)|v2642(VarCurr)).
% 78.16/77.56  all VarCurr (v2642(VarCurr)<->v2643(VarCurr)&v2644(VarCurr)).
% 78.16/77.56  all VarCurr (v2644(VarCurr)<->v2645(VarCurr)&v2648(VarCurr)).
% 78.16/77.56  all VarCurr (-v2648(VarCurr)<->v1177(VarCurr)).
% 78.16/77.56  all VarCurr (v2645(VarCurr)<->v2646(VarCurr)&v1178(VarCurr)).
% 78.16/77.56  all VarCurr (v2646(VarCurr)<->v2647(VarCurr)&v395(VarCurr)).
% 78.16/77.56  all VarCurr (-v2647(VarCurr)<->v2584(VarCurr)).
% 78.16/77.56  all VarCurr (-v2643(VarCurr)<->v1158(VarCurr)).
% 78.16/77.56  all VarCurr (v2641(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.16/77.56  all VarCurr (v1139(VarCurr,bitIndex8)<->v1140(VarCurr,bitIndex8)).
% 78.16/77.56  all VarCurr (-v2635(VarCurr)-> (v1147(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2635(VarCurr)-> (v1147(VarCurr,bitIndex3)<->v1146(VarCurr,bitIndex3))).
% 78.16/77.56  all VarCurr (-v2635(VarCurr)<->v1346(VarCurr)).
% 78.16/77.56  all VarCurr (-v395(VarCurr)-> (v1146(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v395(VarCurr)-> (v1146(VarCurr,bitIndex3)<->v2500(VarCurr,bitIndex3))).
% 78.16/77.56  all VarCurr (-v2632(VarCurr)-> (v2500(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2632(VarCurr)-> (v2500(VarCurr,bitIndex3)<->$T)).
% 78.16/77.56  all VarCurr (-v2632(VarCurr)<->v2586(VarCurr)).
% 78.16/77.56  all VarCurr (v1124(VarCurr,bitIndex8)<->v1125(VarCurr,bitIndex8)).
% 78.16/77.56  all VarCurr (-v2629(VarCurr)-> (v1132(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2629(VarCurr)-> (v1132(VarCurr,bitIndex3)<->v1131(VarCurr,bitIndex3))).
% 78.16/77.56  all VarCurr (-v2629(VarCurr)<->v1346(VarCurr)).
% 78.16/77.56  all VarCurr (-v395(VarCurr)-> (v1131(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v395(VarCurr)-> (v1131(VarCurr,bitIndex3)<->v2493(VarCurr,bitIndex3))).
% 78.16/77.56  all VarCurr (-v2626(VarCurr)-> (v2493(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2626(VarCurr)-> (v2493(VarCurr,bitIndex3)<->$T)).
% 78.16/77.56  all VarCurr (-v2626(VarCurr)<->v2586(VarCurr)).
% 78.16/77.56  all VarCurr (v1109(VarCurr,bitIndex8)<->v1110(VarCurr,bitIndex8)).
% 78.16/77.56  all VarCurr (-v2623(VarCurr)-> (v1117(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2623(VarCurr)-> (v1117(VarCurr,bitIndex3)<->v1116(VarCurr,bitIndex3))).
% 78.16/77.56  all VarCurr (-v2623(VarCurr)<->v1346(VarCurr)).
% 78.16/77.56  all VarCurr (-v395(VarCurr)-> (v1116(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v395(VarCurr)-> (v1116(VarCurr,bitIndex3)<->v2486(VarCurr,bitIndex3))).
% 78.16/77.56  all VarCurr (-v2620(VarCurr)-> (v2486(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2620(VarCurr)-> (v2486(VarCurr,bitIndex3)<->$T)).
% 78.16/77.56  all VarCurr (-v2620(VarCurr)<->v2586(VarCurr)).
% 78.16/77.56  all VarCurr (v1094(VarCurr,bitIndex8)<->v1095(VarCurr,bitIndex8)).
% 78.16/77.56  all VarCurr (-v2617(VarCurr)-> (v1102(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2617(VarCurr)-> (v1102(VarCurr,bitIndex3)<->v1101(VarCurr,bitIndex3))).
% 78.16/77.56  all VarCurr (-v2617(VarCurr)<->v1346(VarCurr)).
% 78.16/77.56  all VarCurr (-v395(VarCurr)-> (v1101(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v395(VarCurr)-> (v1101(VarCurr,bitIndex3)<->v2479(VarCurr,bitIndex3))).
% 78.16/77.56  all VarCurr (-v2614(VarCurr)-> (v2479(VarCurr,bitIndex3)<->$F)).
% 78.16/77.56  all VarCurr (v2614(VarCurr)-> (v2479(VarCurr,bitIndex3)<->$T)).
% 78.16/77.56  all VarCurr (-v2614(VarCurr)<->v2586(VarCurr)).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2596(VarNext)-> (v2584(VarNext)<->v2584(VarCurr)))).
% 78.16/77.56  all VarNext (v2596(VarNext)-> (v2584(VarNext)<->v2609(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2609(VarNext)<->v2607(VarCurr))).
% 78.16/77.56  all VarCurr (-v369(VarCurr)-> (v2607(VarCurr)<->v2586(VarCurr))).
% 78.16/77.56  all VarCurr (v369(VarCurr)-> (v2607(VarCurr)<->$F)).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2596(VarNext)<->v2597(VarNext)&v2604(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2604(VarNext)<->v2602(VarCurr))).
% 78.16/77.56  all VarCurr (v2602(VarCurr)<->v369(VarCurr)|v2605(VarCurr)).
% 78.16/77.56  all VarCurr (v2605(VarCurr)<->v24(VarCurr)&v2606(VarCurr)).
% 78.16/77.56  all VarCurr (-v2606(VarCurr)<->v369(VarCurr)).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2597(VarNext)<->v2599(VarNext)&v355(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2599(VarNext)<->v362(VarNext))).
% 78.16/77.56  v2584(constB0)<->$F.
% 78.16/77.56  all VarCurr (v2586(VarCurr)<->v2588(VarCurr)&v454(VarCurr,bitIndex0)).
% 78.16/77.56  all VarCurr (-v2588(VarCurr)<->v2589(VarCurr)).
% 78.16/77.56  all VarCurr (v2589(VarCurr)<->v2591(VarCurr)|v454(VarCurr,bitIndex5)).
% 78.16/77.56  all VarCurr (v2591(VarCurr)<->v2592(VarCurr)|v454(VarCurr,bitIndex4)).
% 78.16/77.56  all VarCurr (v2592(VarCurr)<->v2593(VarCurr)|v454(VarCurr,bitIndex3)).
% 78.16/77.56  all VarCurr (v2593(VarCurr)<->v454(VarCurr,bitIndex1)|v454(VarCurr,bitIndex2)).
% 78.16/77.56  all VarCurr (v7(VarCurr,bitIndex4)<->v9(VarCurr,bitIndex4)).
% 78.16/77.56  all VarCurr (v9(VarCurr,bitIndex4)<->v11(VarCurr,bitIndex4)).
% 78.16/77.56  all VarCurr (v11(VarCurr,bitIndex4)<->v13(VarCurr,bitIndex4)).
% 78.16/77.56  all VarNext (v13(VarNext,bitIndex4)<->v2571(VarNext,bitIndex4)).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2572(VarNext)-> (all B (range_4_0(B)-> (v2571(VarNext,B)<->v13(VarCurr,B)))))).
% 78.16/77.56  all VarNext (v2572(VarNext)-> (all B (range_4_0(B)-> (v2571(VarNext,B)<->v2580(VarNext,B))))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_4_0(B)-> (v2580(VarNext,B)<->v2578(VarCurr,B))))).
% 78.16/77.56  all VarCurr (-v2567(VarCurr)-> (all B (range_4_0(B)-> (v2578(VarCurr,B)<->v20(VarCurr,B))))).
% 78.16/77.56  all VarCurr (v2567(VarCurr)-> (all B (range_4_0(B)-> (v2578(VarCurr,B)<->$F)))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2572(VarNext)<->v2573(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2573(VarNext)<->v2575(VarNext)&v355(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2575(VarNext)<->v362(VarNext))).
% 78.16/77.56  all B (range_4_0(B)-> (v13(constB0,B)<->$F)).
% 78.16/77.56  all VarNext (v20(VarNext,bitIndex4)<->v2557(VarNext,bitIndex4)).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2558(VarNext)-> (all B (range_4_0(B)-> (v2557(VarNext,B)<->v20(VarCurr,B)))))).
% 78.16/77.56  all VarNext (v2558(VarNext)-> (all B (range_4_0(B)-> (v2557(VarNext,B)<->v2566(VarNext,B))))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_4_0(B)-> (v2566(VarNext,B)<->v2564(VarCurr,B))))).
% 78.16/77.56  all VarCurr (-v2567(VarCurr)-> (all B (range_4_0(B)-> (v2564(VarCurr,B)<->v22(VarCurr,B))))).
% 78.16/77.56  all VarCurr (v2567(VarCurr)-> (all B (range_4_0(B)-> (v2564(VarCurr,B)<->$F)))).
% 78.16/77.56  all VarCurr (-v2567(VarCurr)<->v15(VarCurr)).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2558(VarNext)<->v2559(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2559(VarNext)<->v2560(VarNext)&v355(VarNext))).
% 78.16/77.56  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2560(VarNext)<->v362(VarNext))).
% 78.16/77.56  all B (range_4_0(B)-> (v20(constB0,B)<->$F)).
% 78.16/77.56  -b00000(bitIndex4).
% 78.16/77.56  -b00000(bitIndex3).
% 78.16/77.56  -b00000(bitIndex2).
% 78.16/77.57  -b00000(bitIndex1).
% 78.16/77.57  -b00000(bitIndex0).
% 78.16/77.57  all VarCurr (-v2506(VarCurr)-> (v22(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2506(VarCurr)-> (v22(VarCurr,bitIndex4)<->v2552(VarCurr))).
% 78.16/77.57  all VarCurr (-v2510(VarCurr)& -v2511(VarCurr)& -v2517(VarCurr)& -v2530(VarCurr)&v2541(VarCurr)-> (v2552(VarCurr)<->v2555(VarCurr))).
% 78.16/77.57  all VarCurr (-v2510(VarCurr)& -v2511(VarCurr)& -v2517(VarCurr)&v2530(VarCurr)-> (v2552(VarCurr)<->v2554(VarCurr))).
% 78.16/77.57  all VarCurr (-v2510(VarCurr)& -v2511(VarCurr)&v2517(VarCurr)-> (v2552(VarCurr)<->v2553(VarCurr))).
% 78.16/77.57  all VarCurr (-v2510(VarCurr)&v2511(VarCurr)-> (v2552(VarCurr)<->$T)).
% 78.16/77.57  all VarCurr (v2510(VarCurr)-> (v2552(VarCurr)<->v1094(VarCurr,bitIndex9))).
% 78.16/77.57  all VarCurr (-v24(VarCurr)-> (v2555(VarCurr)<->$T)).
% 78.16/77.57  all VarCurr (v24(VarCurr)-> (v2555(VarCurr)<->v1139(VarCurr,bitIndex9))).
% 78.16/77.57  all VarCurr (-v24(VarCurr)-> (v2554(VarCurr)<->$T)).
% 78.16/77.57  all VarCurr (v24(VarCurr)-> (v2554(VarCurr)<->v1124(VarCurr,bitIndex9))).
% 78.16/77.57  all VarCurr (-v24(VarCurr)-> (v2553(VarCurr)<->$T)).
% 78.16/77.57  all VarCurr (v24(VarCurr)-> (v2553(VarCurr)<->v1109(VarCurr,bitIndex9))).
% 78.16/77.57  all VarCurr (v2506(VarCurr)<->v2507(VarCurr)|v2541(VarCurr)).
% 78.16/77.57  all VarCurr (v2541(VarCurr)<->v2542(VarCurr)&v2546(VarCurr)).
% 78.16/77.57  all VarCurr (v2546(VarCurr)<->v2547(VarCurr)&v2551(VarCurr)).
% 78.16/77.57  all VarCurr (-v2551(VarCurr)<->v2540(VarCurr)).
% 78.16/77.57  all VarCurr (v2547(VarCurr)<->v2548(VarCurr)&v1200(VarCurr)).
% 78.16/77.57  all VarCurr (v2548(VarCurr)<->v24(VarCurr)|v2549(VarCurr)).
% 78.16/77.57  all VarCurr (v2549(VarCurr)<->v768(VarCurr)&v2550(VarCurr)).
% 78.16/77.57  all VarCurr (-v2550(VarCurr)<->v24(VarCurr)).
% 78.16/77.57  all VarCurr (-v2542(VarCurr)<->v2543(VarCurr)).
% 78.16/77.57  all VarCurr (v2543(VarCurr)<->v2544(VarCurr)|v1187(VarCurr)).
% 78.16/77.57  all VarCurr (v2544(VarCurr)<->v2545(VarCurr)|v1174(VarCurr)).
% 78.16/77.57  all VarCurr (v2545(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.57  all VarCurr (v2507(VarCurr)<->v2508(VarCurr)|v2530(VarCurr)).
% 78.16/77.57  all VarCurr (v2530(VarCurr)<->v2531(VarCurr)&v2534(VarCurr)).
% 78.16/77.57  all VarCurr (v2534(VarCurr)<->v2535(VarCurr)&v2539(VarCurr)).
% 78.16/77.57  all VarCurr (-v2539(VarCurr)<->v2540(VarCurr)).
% 78.16/77.57  all VarCurr (v2540(VarCurr)<->v1177(VarCurr)|v1190(VarCurr)).
% 78.16/77.57  all VarCurr (v2535(VarCurr)<->v2536(VarCurr)&v1187(VarCurr)).
% 78.16/77.57  all VarCurr (v2536(VarCurr)<->v24(VarCurr)|v2537(VarCurr)).
% 78.16/77.57  all VarCurr (v2537(VarCurr)<->v768(VarCurr)&v2538(VarCurr)).
% 78.16/77.57  all VarCurr (-v2538(VarCurr)<->v24(VarCurr)).
% 78.16/77.57  all VarCurr (-v2531(VarCurr)<->v2532(VarCurr)).
% 78.16/77.57  all VarCurr (v2532(VarCurr)<->v2533(VarCurr)|v1174(VarCurr)).
% 78.16/77.57  all VarCurr (v2533(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.57  all VarCurr (v2508(VarCurr)<->v2509(VarCurr)|v2517(VarCurr)).
% 78.16/77.57  all VarCurr (v2517(VarCurr)<->v2518(VarCurr)&v2520(VarCurr)).
% 78.16/77.57  all VarCurr (v2520(VarCurr)<->v2521(VarCurr)&v2529(VarCurr)).
% 78.16/77.57  all VarCurr (-v2529(VarCurr)<->v1177(VarCurr)).
% 78.16/77.57  all VarCurr (v2521(VarCurr)<->v2522(VarCurr)&v1174(VarCurr)).
% 78.16/77.57  all VarCurr (v2522(VarCurr)<->v24(VarCurr)|v2523(VarCurr)).
% 78.16/77.57  all VarCurr (v2523(VarCurr)<->v2524(VarCurr)&v2528(VarCurr)).
% 78.16/77.57  all VarCurr (-v2528(VarCurr)<->v24(VarCurr)).
% 78.16/77.57  all VarCurr (v2524(VarCurr)<->v2525(VarCurr)&v2527(VarCurr)).
% 78.16/77.57  all VarCurr (-v2527(VarCurr)<->v1171(VarCurr)).
% 78.16/77.57  all VarCurr (v2525(VarCurr)<->v1167(VarCurr)&v2526(VarCurr)).
% 78.16/77.57  all VarCurr (-v2526(VarCurr)<->v452(VarCurr)).
% 78.16/77.57  all VarCurr (-v2518(VarCurr)<->v2519(VarCurr)).
% 78.16/77.57  all VarCurr (v2519(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.16/77.57  all VarCurr (v2509(VarCurr)<->v2510(VarCurr)|v2511(VarCurr)).
% 78.16/77.57  all VarCurr (v2511(VarCurr)<->v2512(VarCurr)&v2513(VarCurr)).
% 78.16/77.57  all VarCurr (v2513(VarCurr)<->v2514(VarCurr)&v2516(VarCurr)).
% 78.16/77.57  all VarCurr (-v2516(VarCurr)<->v1177(VarCurr)).
% 78.16/77.57  all VarCurr (v2514(VarCurr)<->v2515(VarCurr)&v1178(VarCurr)).
% 78.16/77.57  all VarCurr (-v2515(VarCurr)<->v395(VarCurr)).
% 78.16/77.57  all VarCurr (-v2512(VarCurr)<->v1158(VarCurr)).
% 78.16/77.57  all VarCurr (v2510(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.16/77.57  all VarCurr (v1139(VarCurr,bitIndex9)<->v1140(VarCurr,bitIndex9)).
% 78.16/77.57  all VarCurr (-v2504(VarCurr)-> (v1146(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2504(VarCurr)-> (v1146(VarCurr,bitIndex4)<->v2500(VarCurr,bitIndex4))).
% 78.16/77.57  all VarCurr (-v2504(VarCurr)<->v1346(VarCurr)).
% 78.16/77.57  all VarCurr (-v2502(VarCurr)-> (v2500(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2502(VarCurr)-> (v2500(VarCurr,bitIndex4)<->$T)).
% 78.16/77.57  all VarCurr (-v2502(VarCurr)<->v395(VarCurr)).
% 78.16/77.57  all VarCurr (v1124(VarCurr,bitIndex9)<->v1125(VarCurr,bitIndex9)).
% 78.16/77.57  all VarCurr (-v2497(VarCurr)-> (v1131(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2497(VarCurr)-> (v1131(VarCurr,bitIndex4)<->v2493(VarCurr,bitIndex4))).
% 78.16/77.57  all VarCurr (-v2497(VarCurr)<->v1346(VarCurr)).
% 78.16/77.57  all VarCurr (-v2495(VarCurr)-> (v2493(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2495(VarCurr)-> (v2493(VarCurr,bitIndex4)<->$T)).
% 78.16/77.57  all VarCurr (-v2495(VarCurr)<->v395(VarCurr)).
% 78.16/77.57  all VarCurr (v1109(VarCurr,bitIndex9)<->v1110(VarCurr,bitIndex9)).
% 78.16/77.57  all VarCurr (-v2490(VarCurr)-> (v1116(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2490(VarCurr)-> (v1116(VarCurr,bitIndex4)<->v2486(VarCurr,bitIndex4))).
% 78.16/77.57  all VarCurr (-v2490(VarCurr)<->v1346(VarCurr)).
% 78.16/77.57  all VarCurr (-v2488(VarCurr)-> (v2486(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2488(VarCurr)-> (v2486(VarCurr,bitIndex4)<->$T)).
% 78.16/77.57  all VarCurr (-v2488(VarCurr)<->v395(VarCurr)).
% 78.16/77.57  all VarCurr (v1094(VarCurr,bitIndex9)<->v1095(VarCurr,bitIndex9)).
% 78.16/77.57  all VarCurr (-v2483(VarCurr)-> (v1101(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2483(VarCurr)-> (v1101(VarCurr,bitIndex4)<->v2479(VarCurr,bitIndex4))).
% 78.16/77.57  all VarCurr (-v2483(VarCurr)<->v1346(VarCurr)).
% 78.16/77.57  all VarCurr (-v2481(VarCurr)-> (v2479(VarCurr,bitIndex4)<->$F)).
% 78.16/77.57  all VarCurr (v2481(VarCurr)-> (v2479(VarCurr,bitIndex4)<->$T)).
% 78.16/77.57  all VarCurr (-v2481(VarCurr)<->v395(VarCurr)).
% 78.16/77.57  all VarCurr (v24(VarCurr)<->v26(VarCurr)).
% 78.16/77.57  all VarCurr (v26(VarCurr)<->v28(VarCurr)).
% 78.16/77.57  all VarCurr (v28(VarCurr)<->v2472(VarCurr)|v2475(VarCurr)).
% 78.16/77.57  all VarCurr (v2475(VarCurr)<->v81(VarCurr,bitIndex2)&v2476(VarCurr)).
% 78.16/77.57  all VarCurr (-v2476(VarCurr)<->v2344(VarCurr,bitIndex2)).
% 78.16/77.57  all VarCurr (v2472(VarCurr)<->v2473(VarCurr)&v2474(VarCurr)).
% 78.16/77.57  all VarCurr (-v2474(VarCurr)<->v2344(VarCurr,bitIndex2)).
% 78.16/77.57  all VarCurr (v2473(VarCurr)<->v30(VarCurr)&v81(VarCurr,bitIndex1)).
% 78.16/77.57  all VarCurr (v30(VarCurr)<->v2470(VarCurr)|v153(VarCurr)).
% 78.16/77.57  all VarCurr (v2470(VarCurr)<->v32(VarCurr)|v2468(VarCurr)).
% 78.16/77.57  all VarCurr (v2468(VarCurr)<->v342(VarCurr)).
% 78.16/77.57  all VarCurr (v32(VarCurr)<->v34(VarCurr)).
% 78.16/77.57  all VarCurr (v34(VarCurr)<->v36(VarCurr)).
% 78.16/77.57  all VarCurr (-v2464(VarCurr)-> (v36(VarCurr)<->$F)).
% 78.16/77.57  all VarCurr (v2464(VarCurr)-> (v36(VarCurr)<->$T)).
% 78.16/77.57  all VarCurr (v2464(VarCurr)<->v2465(VarCurr)|v2466(VarCurr)).
% 78.16/77.57  all VarCurr (v2466(VarCurr)<->v244(VarCurr)&v314(VarCurr)).
% 78.16/77.57  all VarCurr (v2465(VarCurr)<->v38(VarCurr)&v299(VarCurr)).
% 78.16/77.57  all VarCurr (v38(VarCurr)<-> (v40(VarCurr,bitIndex29)<->v2459(VarCurr,bitIndex15))& (v40(VarCurr,bitIndex28)<->v2459(VarCurr,bitIndex14))& (v40(VarCurr,bitIndex27)<->v2459(VarCurr,bitIndex13))& (v40(VarCurr,bitIndex26)<->v2459(VarCurr,bitIndex12))& (v40(VarCurr,bitIndex25)<->v2459(VarCurr,bitIndex11))& (v40(VarCurr,bitIndex24)<->v2459(VarCurr,bitIndex10))& (v40(VarCurr,bitIndex23)<->v2459(VarCurr,bitIndex9))& (v40(VarCurr,bitIndex22)<->v2459(VarCurr,bitIndex8))& (v40(VarCurr,bitIndex21)<->v2459(VarCurr,bitIndex7))& (v40(VarCurr,bitIndex20)<->v2459(VarCurr,bitIndex6))& (v40(VarCurr,bitIndex19)<->v2459(VarCurr,bitIndex5))& (v40(VarCurr,bitIndex18)<->v2459(VarCurr,bitIndex4))& (v40(VarCurr,bitIndex17)<->v2459(VarCurr,bitIndex3))& (v40(VarCurr,bitIndex16)<->v2459(VarCurr,bitIndex2))& (v40(VarCurr,bitIndex15)<->v2459(VarCurr,bitIndex1))& (v40(VarCurr,bitIndex14)<->v2459(VarCurr,bitIndex0))).
% 78.16/77.57  all VarCurr B (range_15_0(B)-> (v2459(VarCurr,B)<->v2461(VarCurr,B))).
% 78.16/77.57  all B (range_15_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B).
% 78.16/77.57  all VarCurr B (range_29_14(B)-> (v42(VarCurr,B)<->v44(VarCurr,B))).
% 78.16/77.57  all VarCurr B (range_29_14(B)-> (v44(VarCurr,B)<->v46(VarCurr,B))).
% 78.16/77.57  all VarCurr ((v46(VarCurr,bitIndex29)<->v48(VarCurr,bitIndex609))& (v46(VarCurr,bitIndex28)<->v48(VarCurr,bitIndex608))& (v46(VarCurr,bitIndex27)<->v48(VarCurr,bitIndex607))& (v46(VarCurr,bitIndex26)<->v48(VarCurr,bitIndex606))& (v46(VarCurr,bitIndex25)<->v48(VarCurr,bitIndex605))& (v46(VarCurr,bitIndex24)<->v48(VarCurr,bitIndex604))& (v46(VarCurr,bitIndex23)<->v48(VarCurr,bitIndex603))& (v46(VarCurr,bitIndex22)<->v48(VarCurr,bitIndex602))& (v46(VarCurr,bitIndex21)<->v48(VarCurr,bitIndex601))& (v46(VarCurr,bitIndex20)<->v48(VarCurr,bitIndex600))& (v46(VarCurr,bitIndex19)<->v48(VarCurr,bitIndex599))& (v46(VarCurr,bitIndex18)<->v48(VarCurr,bitIndex598))& (v46(VarCurr,bitIndex17)<->v48(VarCurr,bitIndex597))& (v46(VarCurr,bitIndex16)<->v48(VarCurr,bitIndex596))& (v46(VarCurr,bitIndex15)<->v48(VarCurr,bitIndex595))& (v46(VarCurr,bitIndex14)<->v48(VarCurr,bitIndex594))).
% 78.16/77.57  all VarNext ((v48(VarNext,bitIndex609)<->v2451(VarNext,bitIndex29))& (v48(VarNext,bitIndex608)<->v2451(VarNext,bitIndex28))& (v48(VarNext,bitIndex607)<->v2451(VarNext,bitIndex27))& (v48(VarNext,bitIndex606)<->v2451(VarNext,bitIndex26))& (v48(VarNext,bitIndex605)<->v2451(VarNext,bitIndex25))& (v48(VarNext,bitIndex604)<->v2451(VarNext,bitIndex24))& (v48(VarNext,bitIndex603)<->v2451(VarNext,bitIndex23))& (v48(VarNext,bitIndex602)<->v2451(VarNext,bitIndex22))& (v48(VarNext,bitIndex601)<->v2451(VarNext,bitIndex21))& (v48(VarNext,bitIndex600)<->v2451(VarNext,bitIndex20))& (v48(VarNext,bitIndex599)<->v2451(VarNext,bitIndex19))& (v48(VarNext,bitIndex598)<->v2451(VarNext,bitIndex18))& (v48(VarNext,bitIndex597)<->v2451(VarNext,bitIndex17))& (v48(VarNext,bitIndex596)<->v2451(VarNext,bitIndex16))& (v48(VarNext,bitIndex595)<->v2451(VarNext,bitIndex15))& (v48(VarNext,bitIndex594)<->v2451(VarNext,bitIndex14))).
% 78.16/77.57  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2453(VarNext)-> (v2451(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v2451(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v2451(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v2451(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v2451(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v2451(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v2451(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v2451(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v2451(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v2451(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v2451(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v2451(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v2451(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v2451(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v2451(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v2451(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v2451(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v2451(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v2451(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v2451(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v2451(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v2451(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v2451(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v2451(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v2451(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v2451(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v2451(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v2451(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v2451(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v2451(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v2451(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v2451(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v2451(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& 
(v2451(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& (v2451(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v2451(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v2451(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v2451(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v2451(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v2451(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v2451(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v2451(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v2451(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v2451(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v2451(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v2451(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v2451(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v2451(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v2451(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v2451(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v2451(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v2451(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v2451(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v2451(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v2451(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v2451(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v2451(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v2451(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v2451(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v2451(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v2451(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v2451(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v2451(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v2451(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v2451(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v2451(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v2451(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v2451(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& 
(v2451(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v2451(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& (v2451(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v2451(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v2451(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v2451(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v2451(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v2451(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v2451(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v2451(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v2451(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v2451(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v2451(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v2451(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v2451(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v2451(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v2451(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v2451(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v2451(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v2451(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v2451(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v2451(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v2451(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v2451(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v2451(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v2451(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v2451(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v2451(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v2451(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v2451(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v2451(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v2451(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v2451(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v2451(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v2451(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& 
(v2451(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v2451(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v2451(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& (v2451(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v2451(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v2451(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v2451(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v2451(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v2451(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v2451(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v2451(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v2451(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v2451(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.16/77.58  all VarNext (v2453(VarNext)-> (all B (range_115_0(B)-> (v2451(VarNext,B)<->v238(VarNext,B))))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2453(VarNext)<->v2454(VarNext)&v233(VarNext))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2454(VarNext)<->v2456(VarNext)&v188(VarNext))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2456(VarNext)<->v207(VarNext))).
% 78.16/77.58  all VarNext ((v48(VarNext,bitIndex493)<->v2443(VarNext,bitIndex29))& (v48(VarNext,bitIndex492)<->v2443(VarNext,bitIndex28))& (v48(VarNext,bitIndex491)<->v2443(VarNext,bitIndex27))& (v48(VarNext,bitIndex490)<->v2443(VarNext,bitIndex26))& (v48(VarNext,bitIndex489)<->v2443(VarNext,bitIndex25))& (v48(VarNext,bitIndex488)<->v2443(VarNext,bitIndex24))& (v48(VarNext,bitIndex487)<->v2443(VarNext,bitIndex23))& (v48(VarNext,bitIndex486)<->v2443(VarNext,bitIndex22))& (v48(VarNext,bitIndex485)<->v2443(VarNext,bitIndex21))& (v48(VarNext,bitIndex484)<->v2443(VarNext,bitIndex20))& (v48(VarNext,bitIndex483)<->v2443(VarNext,bitIndex19))& (v48(VarNext,bitIndex482)<->v2443(VarNext,bitIndex18))& (v48(VarNext,bitIndex481)<->v2443(VarNext,bitIndex17))& (v48(VarNext,bitIndex480)<->v2443(VarNext,bitIndex16))& (v48(VarNext,bitIndex479)<->v2443(VarNext,bitIndex15))& (v48(VarNext,bitIndex478)<->v2443(VarNext,bitIndex14))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2445(VarNext)-> (v2443(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v2443(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v2443(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v2443(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v2443(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v2443(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v2443(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v2443(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v2443(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v2443(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v2443(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v2443(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v2443(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v2443(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v2443(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v2443(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v2443(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v2443(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v2443(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v2443(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v2443(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v2443(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v2443(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v2443(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v2443(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v2443(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v2443(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v2443(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v2443(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v2443(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v2443(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v2443(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v2443(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& 
(v2443(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& (v2443(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v2443(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v2443(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v2443(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v2443(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v2443(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v2443(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v2443(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v2443(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v2443(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v2443(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v2443(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v2443(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v2443(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v2443(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v2443(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v2443(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v2443(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v2443(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v2443(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v2443(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v2443(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v2443(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v2443(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v2443(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v2443(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v2443(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v2443(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v2443(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v2443(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v2443(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v2443(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v2443(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v2443(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& 
(v2443(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v2443(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& (v2443(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v2443(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v2443(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v2443(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v2443(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v2443(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v2443(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v2443(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v2443(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v2443(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v2443(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v2443(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v2443(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v2443(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v2443(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v2443(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v2443(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v2443(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v2443(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v2443(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v2443(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v2443(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v2443(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v2443(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v2443(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v2443(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v2443(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v2443(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v2443(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v2443(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v2443(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v2443(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v2443(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& 
(v2443(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v2443(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v2443(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& (v2443(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v2443(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v2443(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v2443(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v2443(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v2443(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v2443(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v2443(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v2443(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v2443(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.16/77.58  all VarNext (v2445(VarNext)-> (all B (range_115_0(B)-> (v2443(VarNext,B)<->v219(VarNext,B))))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2445(VarNext)<->v2446(VarNext)&v213(VarNext))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2446(VarNext)<->v2448(VarNext)&v188(VarNext))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2448(VarNext)<->v207(VarNext))).
% 78.16/77.58  all VarCurr B (range_29_14(B)-> (v180(VarCurr,B)<->v182(VarCurr,B))).
% 78.16/77.58  all VarCurr B (range_29_14(B)-> (v182(VarCurr,B)<->v184(VarCurr,B))).
% 78.16/77.58  all VarCurr B (range_29_14(B)-> (v184(VarCurr,B)<->v186(VarCurr,B))).
% 78.16/77.58  all B (range_29_14(B)<->bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B).
% 78.16/77.58  all VarCurr (v57(VarCurr,bitIndex0)<->v59(VarCurr,bitIndex0)).
% 78.16/77.58  all VarCurr (v59(VarCurr,bitIndex0)<->v61(VarCurr,bitIndex0)).
% 78.16/77.58  all VarCurr (v61(VarCurr,bitIndex0)<->v166(VarCurr,bitIndex0)).
% 78.16/77.58  all VarCurr (v71(VarCurr)<->v73(VarCurr)).
% 78.16/77.58  all VarCurr (v73(VarCurr)<->v75(VarCurr)).
% 78.16/77.58  all VarCurr (v75(VarCurr)<->v77(VarCurr)).
% 78.16/77.58  all VarCurr (v77(VarCurr)<->v79(VarCurr)).
% 78.16/77.58  all VarCurr (v79(VarCurr)<->v81(VarCurr,bitIndex1)).
% 78.16/77.58  all VarCurr (-v2428(VarCurr)-> (v81(VarCurr,bitIndex1)<->$F)).
% 78.16/77.58  all VarCurr (v2428(VarCurr)-> (v81(VarCurr,bitIndex1)<->$T)).
% 78.16/77.58  all VarCurr (v2428(VarCurr)<->v2429(VarCurr)|v2438(VarCurr)).
% 78.16/77.58  all VarCurr (v2438(VarCurr)<->v2439(VarCurr)&v2440(VarCurr)).
% 78.16/77.58  all VarCurr (v2440(VarCurr)<->v2441(VarCurr)&v2418(VarCurr)).
% 78.16/77.58  all VarCurr (-v2441(VarCurr)<->v2224(VarCurr)).
% 78.16/77.58  all VarCurr (-v2439(VarCurr)<->v2390(VarCurr)).
% 78.16/77.58  all VarCurr (v2429(VarCurr)<->v2430(VarCurr)&v2390(VarCurr)).
% 78.16/77.58  all VarCurr (v2430(VarCurr)<->v2431(VarCurr)|v2435(VarCurr)).
% 78.16/77.58  all VarCurr (v2435(VarCurr)<->v2437(VarCurr)&v2379(VarCurr)).
% 78.16/77.58  all VarCurr (v2437(VarCurr)<->v2388(VarCurr)&v2383(VarCurr)).
% 78.16/77.58  all VarCurr (v2431(VarCurr)<->v2433(VarCurr)&v2379(VarCurr)).
% 78.16/77.58  all VarCurr (v2433(VarCurr)<->v2434(VarCurr)&v30(VarCurr)).
% 78.16/77.58  all VarCurr (v2434(VarCurr)<->v2388(VarCurr)&v2414(VarCurr)).
% 78.16/77.58  all VarNext (v2344(VarNext,bitIndex2)<->v2420(VarNext,bitIndex1)).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2421(VarNext)-> (v2420(VarNext,bitIndex1)<->v2344(VarCurr,bitIndex2))& (v2420(VarNext,bitIndex0)<->v2344(VarCurr,bitIndex1)))).
% 78.16/77.58  all VarNext (v2421(VarNext)-> (all B (range_1_0(B)-> (v2420(VarNext,B)<->v2360(VarNext,B))))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2421(VarNext)<->v2422(VarNext))).
% 78.16/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2422(VarNext)<->v2424(VarNext)&v2317(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2424(VarNext)<->v2324(VarNext))).
% 78.25/77.58  all VarCurr (-v2408(VarCurr)-> (v81(VarCurr,bitIndex2)<->$F)).
% 78.25/77.58  all VarCurr (v2408(VarCurr)-> (v81(VarCurr,bitIndex2)<->$T)).
% 78.25/77.58  all VarCurr (v2408(VarCurr)<->v2409(VarCurr)|v2415(VarCurr)).
% 78.25/77.58  all VarCurr (v2415(VarCurr)<->v2416(VarCurr)&v2417(VarCurr)).
% 78.25/77.58  all VarCurr (v2417(VarCurr)<->v2224(VarCurr)&v2418(VarCurr)).
% 78.25/77.58  all VarCurr (v2418(VarCurr)<-> ($T<->v2344(VarCurr,bitIndex2))).
% 78.25/77.58  all VarCurr (-v2416(VarCurr)<->v2390(VarCurr)).
% 78.25/77.58  all VarCurr (v2409(VarCurr)<->v2410(VarCurr)&v2390(VarCurr)).
% 78.25/77.58  all VarCurr (v2410(VarCurr)<->v2412(VarCurr)&v2379(VarCurr)).
% 78.25/77.58  all VarCurr (v2412(VarCurr)<->v2413(VarCurr)&v30(VarCurr)).
% 78.25/77.58  all VarCurr (v2413(VarCurr)<->v2378(VarCurr)&v2414(VarCurr)).
% 78.25/77.58  all VarCurr (-v2414(VarCurr)<->v2228(VarCurr)).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2395(VarNext)-> (v2344(VarNext,bitIndex0)<->v2344(VarCurr,bitIndex0)))).
% 78.25/77.58  all VarNext (v2395(VarNext)-> (v2344(VarNext,bitIndex0)<->v2403(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2403(VarNext)<->v2401(VarCurr))).
% 78.25/77.58  all VarCurr (-v2361(VarCurr)-> (v2401(VarCurr)<->v81(VarCurr,bitIndex0))).
% 78.25/77.58  all VarCurr (v2361(VarCurr)-> (v2401(VarCurr)<->$T)).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2395(VarNext)<->v2396(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2396(VarNext)<->v2398(VarNext)&v2317(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2398(VarNext)<->v2324(VarNext))).
% 78.25/77.58  all VarCurr (-v2365(VarCurr)-> (v81(VarCurr,bitIndex0)<->$F)).
% 78.25/77.58  all VarCurr (v2365(VarCurr)-> (v81(VarCurr,bitIndex0)<->$T)).
% 78.25/77.58  all VarCurr (v2365(VarCurr)<->v2366(VarCurr)&v2390(VarCurr)).
% 78.25/77.58  all VarCurr (v2390(VarCurr)<->v2391(VarCurr)|v2392(VarCurr)).
% 78.25/77.58  all VarCurr (v2392(VarCurr)<-> ($T<->v2344(VarCurr,bitIndex1))).
% 78.25/77.58  all VarCurr (v2391(VarCurr)<-> ($T<->v2344(VarCurr,bitIndex0))).
% 78.25/77.58  all VarCurr (v2366(VarCurr)<->v2367(VarCurr)|v2384(VarCurr)).
% 78.25/77.58  all VarCurr (v2384(VarCurr)<->v2386(VarCurr)&v2379(VarCurr)).
% 78.25/77.58  all VarCurr (v2386(VarCurr)<->v2387(VarCurr)&v30(VarCurr)).
% 78.25/77.58  all VarCurr (v2387(VarCurr)<->v2388(VarCurr)&v2228(VarCurr)).
% 78.25/77.58  all VarCurr (v2388(VarCurr)<->v2389(VarCurr)&v2373(VarCurr)).
% 78.25/77.58  all VarCurr (-v2389(VarCurr)<->v2224(VarCurr)).
% 78.25/77.58  all VarCurr (v2367(VarCurr)<->v2368(VarCurr)|v2380(VarCurr)).
% 78.25/77.58  all VarCurr (v2380(VarCurr)<->v2382(VarCurr)&v2379(VarCurr)).
% 78.25/77.58  all VarCurr (v2382(VarCurr)<->v2378(VarCurr)&v2383(VarCurr)).
% 78.25/77.58  all VarCurr (-v2383(VarCurr)<->v30(VarCurr)).
% 78.25/77.58  all VarCurr (v2368(VarCurr)<->v2369(VarCurr)|v2374(VarCurr)).
% 78.25/77.58  all VarCurr (v2374(VarCurr)<->v2376(VarCurr)&v2379(VarCurr)).
% 78.25/77.58  all VarCurr (-v2379(VarCurr)<->v151(VarCurr)).
% 78.25/77.58  all VarCurr (v2376(VarCurr)<->v2377(VarCurr)&v30(VarCurr)).
% 78.25/77.58  all VarCurr (v2377(VarCurr)<->v2378(VarCurr)&v2228(VarCurr)).
% 78.25/77.58  all VarCurr (v2378(VarCurr)<->v2224(VarCurr)&v2373(VarCurr)).
% 78.25/77.58  all VarCurr (v2369(VarCurr)<->v2370(VarCurr)|v2371(VarCurr)).
% 78.25/77.58  all VarCurr (v2371(VarCurr)<->v2373(VarCurr)&v151(VarCurr)).
% 78.25/77.58  all VarCurr (-v2373(VarCurr)<->v83(VarCurr)).
% 78.25/77.58  all VarCurr (v2370(VarCurr)<->v83(VarCurr)).
% 78.25/77.58  all VarNext (v2344(VarNext,bitIndex1)<->v2351(VarNext,bitIndex0)).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2352(VarNext)-> (v2351(VarNext,bitIndex1)<->v2344(VarCurr,bitIndex2))& (v2351(VarNext,bitIndex0)<->v2344(VarCurr,bitIndex1)))).
% 78.25/77.58  all VarNext (v2352(VarNext)-> (all B (range_1_0(B)-> (v2351(VarNext,B)<->v2360(VarNext,B))))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_1_0(B)-> (v2360(VarNext,B)<->v2358(VarCurr,B))))).
% 78.25/77.58  all VarCurr (-v2361(VarCurr)-> (v2358(VarCurr,bitIndex1)<->v81(VarCurr,bitIndex2))& (v2358(VarCurr,bitIndex0)<->v81(VarCurr,bitIndex1))).
% 78.25/77.58  all VarCurr (v2361(VarCurr)-> (all B (range_1_0(B)-> (v2358(VarCurr,B)<->$F)))).
% 78.25/77.58  all VarCurr (-v2361(VarCurr)<->v2230(VarCurr)).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2352(VarNext)<->v2353(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2353(VarNext)<->v2354(VarNext)&v2317(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2354(VarNext)<->v2324(VarNext))).
% 78.25/77.58  v2344(constB0,bitIndex2)<->$F.
% 78.25/77.58  v2344(constB0,bitIndex1)<->$F.
% 78.25/77.58  v2344(constB0,bitIndex0)<->$T.
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2320(VarNext)-> (v2228(VarNext)<->v2228(VarCurr)))).
% 78.25/77.58  all VarNext (v2320(VarNext)-> (v2228(VarNext)<->v2337(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2337(VarNext)<->v2335(VarCurr))).
% 78.25/77.58  all VarCurr (-v2331(VarCurr)-> (v2335(VarCurr)<->v2338(VarCurr))).
% 78.25/77.58  all VarCurr (v2331(VarCurr)-> (v2335(VarCurr)<->$F)).
% 78.25/77.58  all VarCurr (-v28(VarCurr)-> (v2338(VarCurr)<->$F)).
% 78.25/77.58  all VarCurr (v28(VarCurr)-> (v2338(VarCurr)<->$T)).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2320(VarNext)<->v2321(VarNext)&v2330(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2330(VarNext)<->v2328(VarCurr))).
% 78.25/77.58  all VarCurr (v2328(VarCurr)<->v2331(VarCurr)|v2332(VarCurr)).
% 78.25/77.58  all VarCurr (v2332(VarCurr)<->v2333(VarCurr)&v2334(VarCurr)).
% 78.25/77.58  all VarCurr (-v2334(VarCurr)<->v2331(VarCurr)).
% 78.25/77.58  all VarCurr (v2333(VarCurr)<-> -(v28(VarCurr)<->v2232(VarCurr))).
% 78.25/77.58  all VarCurr (-v2331(VarCurr)<->v2230(VarCurr)).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2321(VarNext)<->v2322(VarNext)&v2317(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2322(VarNext)<->v2324(VarNext))).
% 78.25/77.58  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2324(VarNext)<->v2317(VarCurr))).
% 78.25/77.58  v2228(constB0)<->$F.
% 78.25/77.58  all VarCurr (v2317(VarCurr)<->v1(VarCurr)).
% 78.25/77.58  all VarCurr (v2232(VarCurr)<->v2234(VarCurr)).
% 78.25/77.58  all VarCurr (v2234(VarCurr)<->v2236(VarCurr)).
% 78.25/77.58  all VarCurr (-v2267(VarCurr)-> (v2236(VarCurr)<->$F)).
% 78.25/77.58  all VarCurr (v2267(VarCurr)-> (v2236(VarCurr)<->v2305(VarCurr))).
% 78.25/77.58  all VarCurr (-v2273(VarCurr)& -v2274(VarCurr)& -v2277(VarCurr)& -v2280(VarCurr)& -v2283(VarCurr)& -v2288(VarCurr)&v2296(VarCurr)-> (v2305(VarCurr)<->v2315(VarCurr))).
% 78.25/77.58  all VarCurr (-v2273(VarCurr)& -v2274(VarCurr)& -v2277(VarCurr)& -v2280(VarCurr)& -v2283(VarCurr)&v2288(VarCurr)-> (v2305(VarCurr)<->v2314(VarCurr))).
% 78.25/77.58  all VarCurr (-v2273(VarCurr)& -v2274(VarCurr)& -v2277(VarCurr)& -v2280(VarCurr)&v2283(VarCurr)-> (v2305(VarCurr)<->v2243(VarCurr))).
% 78.25/77.58  all VarCurr (-v2273(VarCurr)& -v2274(VarCurr)& -v2277(VarCurr)&v2280(VarCurr)-> (v2305(VarCurr)<->v2306(VarCurr))).
% 78.25/77.58  all VarCurr (-v2273(VarCurr)& -v2274(VarCurr)&v2277(VarCurr)-> (v2305(VarCurr)<->v2243(VarCurr))).
% 78.25/77.58  all VarCurr (-v2273(VarCurr)&v2274(VarCurr)-> (v2305(VarCurr)<->$T)).
% 78.25/77.58  all VarCurr (v2273(VarCurr)-> (v2305(VarCurr)<->v1094(VarCurr,bitIndex0))).
% 78.25/77.58  all VarCurr (-v24(VarCurr)-> (v2315(VarCurr)<->v2243(VarCurr))).
% 78.25/77.58  all VarCurr (v24(VarCurr)-> (v2315(VarCurr)<->v1139(VarCurr,bitIndex0))).
% 78.25/77.58  all VarCurr (-v24(VarCurr)-> (v2314(VarCurr)<->v2243(VarCurr))).
% 78.25/77.58  all VarCurr (v24(VarCurr)-> (v2314(VarCurr)<->v1124(VarCurr,bitIndex0))).
% 78.25/77.58  all VarCurr (-v24(VarCurr)-> (v2306(VarCurr)<->v2307(VarCurr))).
% 78.25/77.58  all VarCurr (v24(VarCurr)-> (v2306(VarCurr)<->v1109(VarCurr,bitIndex0))).
% 78.25/77.58  all VarCurr (-v2308(VarCurr)-> (v2307(VarCurr)<->v2243(VarCurr))).
% 78.25/77.58  all VarCurr (v2308(VarCurr)-> (v2307(VarCurr)<->$T)).
% 78.25/77.58  all VarCurr (v2308(VarCurr)<->v2309(VarCurr)&v2313(VarCurr)).
% 78.25/77.58  all VarCurr (-v2313(VarCurr)<->v1171(VarCurr)).
% 78.25/77.58  all VarCurr (v2309(VarCurr)<->v2310(VarCurr)&v2312(VarCurr)).
% 78.25/77.58  all VarCurr (-v2312(VarCurr)<->v452(VarCurr)).
% 78.25/77.58  all VarCurr (v2310(VarCurr)<->v772(VarCurr)&v2311(VarCurr)).
% 78.25/77.58  all VarCurr (-v2311(VarCurr)<->v1167(VarCurr)).
% 78.25/77.58  all VarCurr (v2267(VarCurr)<->v2268(VarCurr)|v2296(VarCurr)).
% 78.25/77.58  all VarCurr (v2296(VarCurr)<->v2297(VarCurr)&v2301(VarCurr)).
% 78.25/77.58  all VarCurr (v2301(VarCurr)<->v2302(VarCurr)&v1200(VarCurr)).
% 78.25/77.58  all VarCurr (v2302(VarCurr)<->v24(VarCurr)|v2303(VarCurr)).
% 78.25/77.58  all VarCurr (v2303(VarCurr)<->v768(VarCurr)&v2304(VarCurr)).
% 78.25/77.58  all VarCurr (-v2304(VarCurr)<->v24(VarCurr)).
% 78.25/77.58  all VarCurr (-v2297(VarCurr)<->v2298(VarCurr)).
% 78.25/77.58  all VarCurr (v2298(VarCurr)<->v2299(VarCurr)|v1187(VarCurr)).
% 78.25/77.58  all VarCurr (v2299(VarCurr)<->v2300(VarCurr)|v1190(VarCurr)).
% 78.25/77.58  all VarCurr (v2300(VarCurr)<->v1158(VarCurr)|v1177(VarCurr)).
% 78.25/77.58  all VarCurr (v2268(VarCurr)<->v2269(VarCurr)|v2288(VarCurr)).
% 78.25/77.58  all VarCurr (v2288(VarCurr)<->v2289(VarCurr)&v2292(VarCurr)).
% 78.25/77.58  all VarCurr (v2292(VarCurr)<->v2293(VarCurr)&v1187(VarCurr)).
% 78.25/77.58  all VarCurr (v2293(VarCurr)<->v24(VarCurr)|v2294(VarCurr)).
% 78.25/77.58  all VarCurr (v2294(VarCurr)<->v768(VarCurr)&v2295(VarCurr)).
% 78.25/77.59  all VarCurr (-v2295(VarCurr)<->v24(VarCurr)).
% 78.25/77.59  all VarCurr (-v2289(VarCurr)<->v2290(VarCurr)).
% 78.25/77.59  all VarCurr (v2290(VarCurr)<->v2291(VarCurr)|v1190(VarCurr)).
% 78.25/77.59  all VarCurr (v2291(VarCurr)<->v1158(VarCurr)|v1177(VarCurr)).
% 78.25/77.59  all VarCurr (v2269(VarCurr)<->v2270(VarCurr)|v2283(VarCurr)).
% 78.25/77.59  all VarCurr (v2283(VarCurr)<->v2284(VarCurr)&v2286(VarCurr)).
% 78.25/77.59  all VarCurr (v2286(VarCurr)<->v2287(VarCurr)&v1190(VarCurr)).
% 78.25/77.59  all VarCurr (-v2287(VarCurr)<->v1241(VarCurr)).
% 78.25/77.59  all VarCurr (-v2284(VarCurr)<->v2285(VarCurr)).
% 78.25/77.59  all VarCurr (v2285(VarCurr)<->v1158(VarCurr)|v1177(VarCurr)).
% 78.25/77.59  all VarCurr (v2270(VarCurr)<->v2271(VarCurr)|v2280(VarCurr)).
% 78.25/77.59  all VarCurr (v2280(VarCurr)<->v2281(VarCurr)&v1174(VarCurr)).
% 78.25/77.59  all VarCurr (-v2281(VarCurr)<->v2282(VarCurr)).
% 78.25/77.59  all VarCurr (v2282(VarCurr)<->v1158(VarCurr)|v1177(VarCurr)).
% 78.25/77.59  all VarCurr (v2271(VarCurr)<->v2272(VarCurr)|v2277(VarCurr)).
% 78.25/77.59  all VarCurr (v2277(VarCurr)<->v2278(VarCurr)&v1178(VarCurr)).
% 78.25/77.59  all VarCurr (-v2278(VarCurr)<->v2279(VarCurr)).
% 78.25/77.59  all VarCurr (v2279(VarCurr)<->v1158(VarCurr)|v1177(VarCurr)).
% 78.25/77.59  all VarCurr (v2272(VarCurr)<->v2273(VarCurr)|v2274(VarCurr)).
% 78.25/77.59  all VarCurr (v2274(VarCurr)<->v2275(VarCurr)&v2276(VarCurr)).
% 78.25/77.59  all VarCurr (v2276(VarCurr)<->v388(VarCurr)&v1177(VarCurr)).
% 78.25/77.59  all VarCurr (-v2275(VarCurr)<->v1158(VarCurr)).
% 78.25/77.59  all VarCurr (v2273(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.25/77.59  all VarCurr (v1139(VarCurr,bitIndex0)<->v1140(VarCurr,bitIndex0)).
% 78.25/77.59  all VarCurr (-v1346(VarCurr)-> (v1152(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v1346(VarCurr)-> (v1152(VarCurr)<->v2263(VarCurr))).
% 78.25/77.59  all VarCurr (-v1628(VarCurr)-> (v2263(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v1628(VarCurr)-> (v2263(VarCurr)<->$T)).
% 78.25/77.59  all VarCurr (v1124(VarCurr,bitIndex0)<->v1125(VarCurr,bitIndex0)).
% 78.25/77.59  all VarCurr (-v1346(VarCurr)-> (v1137(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v1346(VarCurr)-> (v1137(VarCurr)<->v2258(VarCurr))).
% 78.25/77.59  all VarCurr (-v1628(VarCurr)-> (v2258(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v1628(VarCurr)-> (v2258(VarCurr)<->$T)).
% 78.25/77.59  all VarCurr (v1109(VarCurr,bitIndex0)<->v1110(VarCurr,bitIndex0)).
% 78.25/77.59  all VarCurr (-v1346(VarCurr)-> (v1122(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v1346(VarCurr)-> (v1122(VarCurr)<->v2253(VarCurr))).
% 78.25/77.59  all VarCurr (-v1628(VarCurr)-> (v2253(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v1628(VarCurr)-> (v2253(VarCurr)<->$T)).
% 78.25/77.59  all VarCurr (v2243(VarCurr)<->v2245(VarCurr)&v770(VarCurr,bitIndex0)).
% 78.25/77.59  all VarCurr (-v2245(VarCurr)<->v2246(VarCurr)).
% 78.25/77.59  all VarCurr (v2246(VarCurr)<->v2248(VarCurr)|v770(VarCurr,bitIndex5)).
% 78.25/77.59  all VarCurr (v2248(VarCurr)<->v2249(VarCurr)|v770(VarCurr,bitIndex4)).
% 78.25/77.59  all VarCurr (v2249(VarCurr)<->v2250(VarCurr)|v770(VarCurr,bitIndex3)).
% 78.25/77.59  all VarCurr (v2250(VarCurr)<->v770(VarCurr,bitIndex1)|v770(VarCurr,bitIndex2)).
% 78.25/77.59  all VarCurr (v1094(VarCurr,bitIndex0)<->v1095(VarCurr,bitIndex0)).
% 78.25/77.59  all VarCurr (-v1346(VarCurr)-> (v1107(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v1346(VarCurr)-> (v1107(VarCurr)<->v2239(VarCurr))).
% 78.25/77.59  all VarCurr (-v1628(VarCurr)-> (v2239(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v1628(VarCurr)-> (v2239(VarCurr)<->$T)).
% 78.25/77.59  all VarCurr (v2230(VarCurr)<->v17(VarCurr)).
% 78.25/77.59  all VarCurr (v2224(VarCurr)<->v2226(VarCurr)).
% 78.25/77.59  all VarCurr (v151(VarCurr)<->v2220(VarCurr)|v2221(VarCurr)).
% 78.25/77.59  all VarCurr (v2221(VarCurr)<->v2222(VarCurr)&v2136(VarCurr)).
% 78.25/77.59  all VarCurr (v2222(VarCurr)<->v2123(VarCurr)|v32(VarCurr)).
% 78.25/77.59  all VarCurr (v2220(VarCurr)<->v153(VarCurr)&v318(VarCurr)).
% 78.25/77.59  all VarCurr (v2136(VarCurr)<->v2138(VarCurr)).
% 78.25/77.59  all VarCurr (v2138(VarCurr)<->v2140(VarCurr)).
% 78.25/77.59  all VarCurr (v2140(VarCurr)<->v2217(VarCurr)&v2218(VarCurr)).
% 78.25/77.59  all VarCurr (v2218(VarCurr)<-> -(v2142(VarCurr,bitIndex4)<->v2146(VarCurr,bitIndex4))).
% 78.25/77.59  all VarCurr (v2217(VarCurr)<-> (v2142(VarCurr,bitIndex3)<->v2146(VarCurr,bitIndex3))& (v2142(VarCurr,bitIndex2)<->v2146(VarCurr,bitIndex2))& (v2142(VarCurr,bitIndex1)<->v2146(VarCurr,bitIndex1))& (v2142(VarCurr,bitIndex0)<->v2146(VarCurr,bitIndex0))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2198(VarNext)-> (v2146(VarNext,bitIndex4)<->v2146(VarCurr,bitIndex4)))).
% 78.25/77.59  all VarNext (v2198(VarNext)-> (v2146(VarNext,bitIndex4)<->v2212(VarNext))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2212(VarNext)<->v2210(VarCurr))).
% 78.25/77.59  all VarCurr (-v2167(VarCurr)-> (v2210(VarCurr)<->v2213(VarCurr))).
% 78.25/77.59  all VarCurr (v2167(VarCurr)-> (v2210(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (-v2213(VarCurr)<->v2146(VarCurr,bitIndex4)).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2198(VarNext)<->v2199(VarNext)&v2206(VarNext))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2206(VarNext)<->v2204(VarCurr))).
% 78.25/77.59  all VarCurr (v2204(VarCurr)<->v2167(VarCurr)|v2207(VarCurr)).
% 78.25/77.59  all VarCurr (v2207(VarCurr)<->v2208(VarCurr)&v2209(VarCurr)).
% 78.25/77.59  all VarCurr (-v2209(VarCurr)<->v2167(VarCurr)).
% 78.25/77.59  all VarCurr (v2208(VarCurr)<->v2174(VarCurr)&v2148(VarCurr)).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2199(VarNext)<->v2201(VarNext)&v2058(VarNext))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2201(VarNext)<->v2065(VarNext))).
% 78.25/77.59  all VarCurr (v2142(VarCurr,bitIndex4)<->v2144(VarCurr,bitIndex4)).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2158(VarNext)-> (all B (range_3_0(B)-> (v2146(VarNext,B)<->v2146(VarCurr,B)))))).
% 78.25/77.59  all VarNext (v2158(VarNext)-> (all B (range_3_0(B)-> (v2146(VarNext,B)<->v2172(VarNext,B))))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_3_0(B)-> (v2172(VarNext,B)<->v2170(VarCurr,B))))).
% 78.25/77.59  all VarCurr (-v2167(VarCurr)-> (all B (range_3_0(B)-> (v2170(VarCurr,B)<->v2173(VarCurr,B))))).
% 78.25/77.59  all VarCurr (v2167(VarCurr)-> (all B (range_3_0(B)-> (v2170(VarCurr,B)<->$F)))).
% 78.25/77.59  all VarCurr (-v2174(VarCurr)-> (all B (range_3_0(B)-> (v2173(VarCurr,B)<->v2175(VarCurr,B))))).
% 78.25/77.59  all VarCurr (v2174(VarCurr)-> (all B (range_3_0(B)-> (v2173(VarCurr,B)<->$F)))).
% 78.25/77.59  all VarCurr (v2175(VarCurr,bitIndex0)<->v2191(VarCurr)).
% 78.25/77.59  all VarCurr (v2175(VarCurr,bitIndex1)<->v2189(VarCurr)).
% 78.25/77.59  all VarCurr (v2175(VarCurr,bitIndex2)<->v2184(VarCurr)).
% 78.25/77.59  all VarCurr (v2175(VarCurr,bitIndex3)<->v2177(VarCurr)).
% 78.25/77.59  all VarCurr (v2189(VarCurr)<->v2190(VarCurr)&v2193(VarCurr)).
% 78.25/77.59  all VarCurr (v2193(VarCurr)<->v2146(VarCurr,bitIndex0)|v2146(VarCurr,bitIndex1)).
% 78.25/77.59  all VarCurr (v2190(VarCurr)<->v2191(VarCurr)|v2192(VarCurr)).
% 78.25/77.59  all VarCurr (-v2192(VarCurr)<->v2146(VarCurr,bitIndex1)).
% 78.25/77.59  all VarCurr (-v2191(VarCurr)<->v2146(VarCurr,bitIndex0)).
% 78.25/77.59  all VarCurr (v2184(VarCurr)<->v2185(VarCurr)&v2188(VarCurr)).
% 78.25/77.59  all VarCurr (v2188(VarCurr)<->v2181(VarCurr)|v2146(VarCurr,bitIndex2)).
% 78.25/77.59  all VarCurr (v2185(VarCurr)<->v2186(VarCurr)|v2187(VarCurr)).
% 78.25/77.59  all VarCurr (-v2187(VarCurr)<->v2146(VarCurr,bitIndex2)).
% 78.25/77.59  all VarCurr (-v2186(VarCurr)<->v2181(VarCurr)).
% 78.25/77.59  all VarCurr (v2177(VarCurr)<->v2178(VarCurr)&v2183(VarCurr)).
% 78.25/77.59  all VarCurr (v2183(VarCurr)<->v2180(VarCurr)|v2146(VarCurr,bitIndex3)).
% 78.25/77.59  all VarCurr (v2178(VarCurr)<->v2179(VarCurr)|v2182(VarCurr)).
% 78.25/77.59  all VarCurr (-v2182(VarCurr)<->v2146(VarCurr,bitIndex3)).
% 78.25/77.59  all VarCurr (-v2179(VarCurr)<->v2180(VarCurr)).
% 78.25/77.59  all VarCurr (v2180(VarCurr)<->v2181(VarCurr)&v2146(VarCurr,bitIndex2)).
% 78.25/77.59  all VarCurr (v2181(VarCurr)<->v2146(VarCurr,bitIndex0)&v2146(VarCurr,bitIndex1)).
% 78.25/77.59  all VarCurr (v2174(VarCurr)<-> (v2146(VarCurr,bitIndex3)<->$T)& (v2146(VarCurr,bitIndex2)<->$F)& (v2146(VarCurr,bitIndex1)<->$T)& (v2146(VarCurr,bitIndex0)<->$T)).
% 78.25/77.59  b1011(bitIndex3).
% 78.25/77.59  -b1011(bitIndex2).
% 78.25/77.59  b1011(bitIndex1).
% 78.25/77.59  b1011(bitIndex0).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2158(VarNext)<->v2159(VarNext)&v2166(VarNext))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2166(VarNext)<->v2164(VarCurr))).
% 78.25/77.59  all VarCurr (v2164(VarCurr)<->v2167(VarCurr)|v2168(VarCurr)).
% 78.25/77.59  all VarCurr (v2168(VarCurr)<->v2148(VarCurr)&v2169(VarCurr)).
% 78.25/77.59  all VarCurr (-v2169(VarCurr)<->v2167(VarCurr)).
% 78.25/77.59  all VarCurr (-v2167(VarCurr)<->v330(VarCurr)).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2159(VarNext)<->v2160(VarNext)&v2058(VarNext))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2160(VarNext)<->v2065(VarNext))).
% 78.25/77.59  v2146(constB0,bitIndex4)<->$F.
% 78.25/77.59  all B (range_3_0(B)-> (v2146(constB0,B)<->$F)).
% 78.25/77.59  all VarCurr (v2148(VarCurr)<->v2150(VarCurr)).
% 78.25/77.59  all VarCurr (v2150(VarCurr)<->v2152(VarCurr)).
% 78.25/77.59  all VarCurr (v2152(VarCurr)<->v81(VarCurr,bitIndex1)&v2154(VarCurr)).
% 78.25/77.59  all VarCurr (v2154(VarCurr)<->v32(VarCurr)|v2123(VarCurr)).
% 78.25/77.59  all VarCurr B (range_3_0(B)-> (v2142(VarCurr,B)<->v2144(VarCurr,B))).
% 78.25/77.59  all VarCurr (v2123(VarCurr)<->v2125(VarCurr)).
% 78.25/77.59  all VarCurr (v2125(VarCurr)<->v2127(VarCurr)).
% 78.25/77.59  all VarCurr (-v2130(VarCurr)-> (v2127(VarCurr)<->$F)).
% 78.25/77.59  all VarCurr (v2130(VarCurr)-> (v2127(VarCurr)<->$T)).
% 78.25/77.59  all VarCurr (v2130(VarCurr)<->v2132(VarCurr)&v2134(VarCurr)).
% 78.25/77.59  all VarCurr (-v2134(VarCurr)<->v159(VarCurr,bitIndex6)).
% 78.25/77.59  all VarCurr (v2132(VarCurr)<->v2133(VarCurr)&v159(VarCurr,bitIndex5)).
% 78.25/77.59  all VarCurr (v2133(VarCurr)<->v309(VarCurr)&v159(VarCurr,bitIndex4)).
% 78.25/77.59  all VarCurr (v318(VarCurr)<->v320(VarCurr)).
% 78.25/77.59  all VarCurr (v320(VarCurr)<->v322(VarCurr)).
% 78.25/77.59  all VarCurr (v322(VarCurr)<->v2120(VarCurr)&v2121(VarCurr)).
% 78.25/77.59  all VarCurr (v2121(VarCurr)<-> -(v324(VarCurr,bitIndex5)<->v328(VarCurr,bitIndex5))).
% 78.25/77.59  all VarCurr (v2120(VarCurr)<-> (v324(VarCurr,bitIndex4)<->v328(VarCurr,bitIndex4))& (v324(VarCurr,bitIndex3)<->v328(VarCurr,bitIndex3))& (v324(VarCurr,bitIndex2)<->v328(VarCurr,bitIndex2))& (v324(VarCurr,bitIndex1)<->v328(VarCurr,bitIndex1))& (v324(VarCurr,bitIndex0)<->v328(VarCurr,bitIndex0))).
% 78.25/77.59  all VarNext (v328(VarNext,bitIndex5)<->v2112(VarNext,bitIndex5)).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2114(VarNext)-> (all B (range_5_0(B)-> (v2112(VarNext,B)<->v328(VarCurr,B)))))).
% 78.25/77.59  all VarNext (v2114(VarNext)-> (all B (range_5_0(B)-> (v2112(VarNext,B)<->v2077(VarNext,B))))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2114(VarNext)<->v2115(VarNext)&v2071(VarNext))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2115(VarNext)<->v2117(VarNext)&v2058(VarNext))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2117(VarNext)<->v2065(VarNext))).
% 78.25/77.59  all VarCurr (v324(VarCurr,bitIndex5)<->v326(VarCurr,bitIndex5)).
% 78.25/77.59  all VarNext B (range_4_0(B)-> (v328(VarNext,B)<->v2060(VarNext,B))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2061(VarNext)-> (all B (range_5_0(B)-> (v2060(VarNext,B)<->v328(VarCurr,B)))))).
% 78.25/77.59  all VarNext (v2061(VarNext)-> (all B (range_5_0(B)-> (v2060(VarNext,B)<->v2077(VarNext,B))))).
% 78.25/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_5_0(B)-> (v2077(VarNext,B)<->v2075(VarCurr,B))))).
% 78.25/77.59  all VarCurr (-v2072(VarCurr)-> (all B (range_5_0(B)-> (v2075(VarCurr,B)<->v2078(VarCurr,B))))).
% 78.25/77.59  all VarCurr (v2072(VarCurr)-> (all B (range_5_0(B)-> (v2075(VarCurr,B)<->$F)))).
% 78.25/77.59  all VarCurr (v2078(VarCurr,bitIndex0)<->v2106(VarCurr)).
% 78.25/77.59  all VarCurr (v2078(VarCurr,bitIndex1)<->v2104(VarCurr)).
% 78.25/77.59  all VarCurr (v2078(VarCurr,bitIndex2)<->v2099(VarCurr)).
% 78.25/77.59  all VarCurr (v2078(VarCurr,bitIndex3)<->v2094(VarCurr)).
% 78.25/77.59  all VarCurr (v2078(VarCurr,bitIndex4)<->v2089(VarCurr)).
% 78.25/77.59  all VarCurr (v2078(VarCurr,bitIndex5)<->v2080(VarCurr)).
% 78.25/77.59  all VarCurr (v2104(VarCurr)<->v2105(VarCurr)&v2108(VarCurr)).
% 78.25/77.59  all VarCurr (v2108(VarCurr)<->v328(VarCurr,bitIndex0)|v328(VarCurr,bitIndex1)).
% 78.25/77.59  all VarCurr (v2105(VarCurr)<->v2106(VarCurr)|v2107(VarCurr)).
% 78.25/77.59  all VarCurr (-v2107(VarCurr)<->v328(VarCurr,bitIndex1)).
% 78.25/77.59  all VarCurr (-v2106(VarCurr)<->v328(VarCurr,bitIndex0)).
% 78.25/77.59  all VarCurr (v2099(VarCurr)<->v2100(VarCurr)&v2103(VarCurr)).
% 78.25/77.59  all VarCurr (v2103(VarCurr)<->v2086(VarCurr)|v328(VarCurr,bitIndex2)).
% 78.25/77.59  all VarCurr (v2100(VarCurr)<->v2101(VarCurr)|v2102(VarCurr)).
% 78.25/77.59  all VarCurr (-v2102(VarCurr)<->v328(VarCurr,bitIndex2)).
% 78.25/77.59  all VarCurr (-v2101(VarCurr)<->v2086(VarCurr)).
% 78.25/77.59  all VarCurr (v2094(VarCurr)<->v2095(VarCurr)&v2098(VarCurr)).
% 78.25/77.59  all VarCurr (v2098(VarCurr)<->v2085(VarCurr)|v328(VarCurr,bitIndex3)).
% 78.25/77.59  all VarCurr (v2095(VarCurr)<->v2096(VarCurr)|v2097(VarCurr)).
% 78.25/77.59  all VarCurr (-v2097(VarCurr)<->v328(VarCurr,bitIndex3)).
% 78.25/77.59  all VarCurr (-v2096(VarCurr)<->v2085(VarCurr)).
% 78.25/77.59  all VarCurr (v2089(VarCurr)<->v2090(VarCurr)&v2093(VarCurr)).
% 78.25/77.59  all VarCurr (v2093(VarCurr)<->v2084(VarCurr)|v328(VarCurr,bitIndex4)).
% 78.25/77.59  all VarCurr (v2090(VarCurr)<->v2091(VarCurr)|v2092(VarCurr)).
% 78.25/77.59  all VarCurr (-v2092(VarCurr)<->v328(VarCurr,bitIndex4)).
% 78.25/77.59  all VarCurr (-v2091(VarCurr)<->v2084(VarCurr)).
% 78.25/77.59  all VarCurr (v2080(VarCurr)<->v2081(VarCurr)&v2088(VarCurr)).
% 78.25/77.59  all VarCurr (v2088(VarCurr)<->v2083(VarCurr)|v328(VarCurr,bitIndex5)).
% 78.25/77.59  all VarCurr (v2081(VarCurr)<->v2082(VarCurr)|v2087(VarCurr)).
% 78.25/77.59  all VarCurr (-v2087(VarCurr)<->v328(VarCurr,bitIndex5)).
% 78.25/77.59  all VarCurr (-v2082(VarCurr)<->v2083(VarCurr)).
% 78.25/77.59  all VarCurr (v2083(VarCurr)<->v2084(VarCurr)&v328(VarCurr,bitIndex4)).
% 78.27/77.59  all VarCurr (v2084(VarCurr)<->v2085(VarCurr)&v328(VarCurr,bitIndex3)).
% 78.27/77.59  all VarCurr (v2085(VarCurr)<->v2086(VarCurr)&v328(VarCurr,bitIndex2)).
% 78.27/77.59  all VarCurr (v2086(VarCurr)<->v328(VarCurr,bitIndex0)&v328(VarCurr,bitIndex1)).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2061(VarNext)<->v2062(VarNext)&v2071(VarNext))).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2071(VarNext)<->v2069(VarCurr))).
% 78.27/77.59  all VarCurr (v2069(VarCurr)<->v2072(VarCurr)|v2073(VarCurr)).
% 78.27/77.59  all VarCurr (v2073(VarCurr)<->v332(VarCurr)&v2074(VarCurr)).
% 78.27/77.59  all VarCurr (-v2074(VarCurr)<->v2072(VarCurr)).
% 78.27/77.59  all VarCurr (-v2072(VarCurr)<->v330(VarCurr)).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2062(VarNext)<->v2063(VarNext)&v2058(VarNext))).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2063(VarNext)<->v2065(VarNext))).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2065(VarNext)<->v2058(VarCurr))).
% 78.27/77.59  all B (range_5_0(B)-> (v328(constB0,B)<->$F)).
% 78.27/77.59  all VarCurr (v2058(VarCurr)<->v1(VarCurr)).
% 78.27/77.59  all VarCurr (v332(VarCurr)<->v334(VarCurr)).
% 78.27/77.59  all VarCurr (v334(VarCurr)<->v336(VarCurr)).
% 78.27/77.59  all VarCurr (v336(VarCurr)<->v2056(VarCurr)&v378(VarCurr)).
% 78.27/77.59  all VarCurr (-v2056(VarCurr)<->v338(VarCurr)).
% 78.27/77.59  all VarCurr (v378(VarCurr)<-> (v380(VarCurr,bitIndex1)<->$F)& (v380(VarCurr,bitIndex0)<->$T)).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2029(VarNext)-> (all B (range_1_0(B)-> (v380(VarNext,B)<->v380(VarCurr,B)))))).
% 78.27/77.59  all VarNext (v2029(VarNext)-> (all B (range_1_0(B)-> (v380(VarNext,B)<->v2045(VarNext,B))))).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_1_0(B)-> (v2045(VarNext,B)<->v2043(VarCurr,B))))).
% 78.27/77.59  all VarCurr (-v2038(VarCurr)-> (all B (range_1_0(B)-> (v2043(VarCurr,B)<->v2046(VarCurr,B))))).
% 78.27/77.59  all VarCurr (v2038(VarCurr)-> (all B (range_1_0(B)-> (v2043(VarCurr,B)<->$F)))).
% 78.27/77.59  all VarCurr (v2046(VarCurr,bitIndex0)<->v2050(VarCurr)).
% 78.27/77.59  all VarCurr (v2046(VarCurr,bitIndex1)<->v2048(VarCurr)).
% 78.27/77.59  all VarCurr (v2048(VarCurr)<->v1086(VarCurr)&v2049(VarCurr)).
% 78.27/77.59  all VarCurr (v2049(VarCurr)<->v2050(VarCurr)|v2051(VarCurr)).
% 78.27/77.59  all VarCurr (-v2051(VarCurr)<->v380(VarCurr,bitIndex1)).
% 78.27/77.59  all VarCurr (-v2050(VarCurr)<->v380(VarCurr,bitIndex0)).
% 78.27/77.59  all VarCurr (v1086(VarCurr)<->v380(VarCurr,bitIndex0)|v380(VarCurr,bitIndex1)).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2029(VarNext)<->v2030(VarNext)&v2037(VarNext))).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2037(VarNext)<->v2035(VarCurr))).
% 78.27/77.59  all VarCurr (v2035(VarCurr)<->v2038(VarCurr)|v2039(VarCurr)).
% 78.27/77.59  all VarCurr (v2039(VarCurr)<->v2040(VarCurr)&v2042(VarCurr)).
% 78.27/77.59  all VarCurr (-v2042(VarCurr)<->v2038(VarCurr)).
% 78.27/77.59  all VarCurr (v2040(VarCurr)<->v382(VarCurr)&v2041(VarCurr)).
% 78.27/77.59  all VarCurr (-v2041(VarCurr)<->v772(VarCurr)).
% 78.27/77.59  all VarCurr (-v2038(VarCurr)<->v15(VarCurr)).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v2030(VarNext)<->v2031(VarNext)&v355(VarNext))).
% 78.27/77.59  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v2031(VarNext)<->v362(VarNext))).
% 78.27/77.59  all VarCurr (-v1961(VarCurr)-> (v382(VarCurr)<->$F)).
% 78.27/77.59  all VarCurr (v1961(VarCurr)-> (v382(VarCurr)<->v2023(VarCurr))).
% 78.27/77.59  all VarCurr (-v1967(VarCurr)& -v1968(VarCurr)& -v1972(VarCurr)& -v1985(VarCurr)& -v1992(VarCurr)& -v2002(VarCurr)&v2013(VarCurr)-> (v2023(VarCurr)<->$T)).
% 78.27/77.59  all VarCurr (-v1967(VarCurr)& -v1968(VarCurr)& -v1972(VarCurr)& -v1985(VarCurr)& -v1992(VarCurr)&v2002(VarCurr)-> (v2023(VarCurr)<->v2026(VarCurr))).
% 78.27/77.59  all VarCurr (-v1967(VarCurr)& -v1968(VarCurr)& -v1972(VarCurr)& -v1985(VarCurr)&v1992(VarCurr)-> (v2023(VarCurr)<->v2025(VarCurr))).
% 78.27/77.59  all VarCurr (-v1967(VarCurr)& -v1968(VarCurr)& -v1972(VarCurr)&v1985(VarCurr)-> (v2023(VarCurr)<->$T)).
% 78.27/77.59  all VarCurr (-v1967(VarCurr)& -v1968(VarCurr)&v1972(VarCurr)-> (v2023(VarCurr)<->v2024(VarCurr))).
% 78.27/77.59  all VarCurr (-v1967(VarCurr)&v1968(VarCurr)-> (v2023(VarCurr)<->$T)).
% 78.27/77.59  all VarCurr (v1967(VarCurr)-> (v2023(VarCurr)<->v1094(VarCurr,bitIndex4))).
% 78.27/77.59  all VarCurr (-v24(VarCurr)-> (v2026(VarCurr)<->$T)).
% 78.27/77.59  all VarCurr (v24(VarCurr)-> (v2026(VarCurr)<->v1139(VarCurr,bitIndex4))).
% 78.27/77.59  all VarCurr (-v24(VarCurr)-> (v2025(VarCurr)<->$T)).
% 78.27/77.59  all VarCurr (v24(VarCurr)-> (v2025(VarCurr)<->v1124(VarCurr,bitIndex4))).
% 78.27/77.60  all VarCurr (-v24(VarCurr)-> (v2024(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v24(VarCurr)-> (v2024(VarCurr)<->v1109(VarCurr,bitIndex4))).
% 78.27/77.60  all VarCurr (v1961(VarCurr)<->v1962(VarCurr)|v2013(VarCurr)).
% 78.27/77.60  all VarCurr (v2013(VarCurr)<->v2014(VarCurr)&v2019(VarCurr)).
% 78.27/77.60  all VarCurr (v2019(VarCurr)<->v2020(VarCurr)&v2022(VarCurr)).
% 78.27/77.60  all VarCurr (-v2022(VarCurr)<->v1177(VarCurr)).
% 78.27/77.60  all VarCurr (v2020(VarCurr)<->v2021(VarCurr)&v1266(VarCurr)).
% 78.27/77.60  all VarCurr (-v2021(VarCurr)<->v1088(VarCurr)).
% 78.27/77.60  all VarCurr (-v2014(VarCurr)<->v2015(VarCurr)).
% 78.27/77.60  all VarCurr (v2015(VarCurr)<->v2016(VarCurr)|v1200(VarCurr)).
% 78.27/77.60  all VarCurr (v2016(VarCurr)<->v2017(VarCurr)|v1187(VarCurr)).
% 78.27/77.60  all VarCurr (v2017(VarCurr)<->v2018(VarCurr)|v1190(VarCurr)).
% 78.27/77.60  all VarCurr (v2018(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1962(VarCurr)<->v1963(VarCurr)|v2002(VarCurr)).
% 78.27/77.60  all VarCurr (v2002(VarCurr)<->v2003(VarCurr)&v2007(VarCurr)).
% 78.27/77.60  all VarCurr (v2007(VarCurr)<->v2008(VarCurr)&v2012(VarCurr)).
% 78.27/77.60  all VarCurr (-v2012(VarCurr)<->v1177(VarCurr)).
% 78.27/77.60  all VarCurr (v2008(VarCurr)<->v2009(VarCurr)&v1200(VarCurr)).
% 78.27/77.60  all VarCurr (v2009(VarCurr)<->v24(VarCurr)|v2010(VarCurr)).
% 78.27/77.60  all VarCurr (v2010(VarCurr)<->v768(VarCurr)&v2011(VarCurr)).
% 78.27/77.60  all VarCurr (-v2011(VarCurr)<->v24(VarCurr)).
% 78.27/77.60  all VarCurr (-v2003(VarCurr)<->v2004(VarCurr)).
% 78.27/77.60  all VarCurr (v2004(VarCurr)<->v2005(VarCurr)|v1187(VarCurr)).
% 78.27/77.60  all VarCurr (v2005(VarCurr)<->v2006(VarCurr)|v1190(VarCurr)).
% 78.27/77.60  all VarCurr (v2006(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1963(VarCurr)<->v1964(VarCurr)|v1992(VarCurr)).
% 78.27/77.60  all VarCurr (v1992(VarCurr)<->v1993(VarCurr)&v1996(VarCurr)).
% 78.27/77.60  all VarCurr (v1996(VarCurr)<->v1997(VarCurr)&v2001(VarCurr)).
% 78.27/77.60  all VarCurr (-v2001(VarCurr)<->v1177(VarCurr)).
% 78.27/77.60  all VarCurr (v1997(VarCurr)<->v1998(VarCurr)&v1187(VarCurr)).
% 78.27/77.60  all VarCurr (v1998(VarCurr)<->v24(VarCurr)|v1999(VarCurr)).
% 78.27/77.60  all VarCurr (v1999(VarCurr)<->v768(VarCurr)&v2000(VarCurr)).
% 78.27/77.60  all VarCurr (-v2000(VarCurr)<->v24(VarCurr)).
% 78.27/77.60  all VarCurr (-v1993(VarCurr)<->v1994(VarCurr)).
% 78.27/77.60  all VarCurr (v1994(VarCurr)<->v1995(VarCurr)|v1190(VarCurr)).
% 78.27/77.60  all VarCurr (v1995(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1964(VarCurr)<->v1965(VarCurr)|v1985(VarCurr)).
% 78.27/77.60  all VarCurr (v1985(VarCurr)<->v1986(VarCurr)&v1988(VarCurr)).
% 78.27/77.60  all VarCurr (v1988(VarCurr)<->v1989(VarCurr)&v1991(VarCurr)).
% 78.27/77.60  all VarCurr (-v1991(VarCurr)<->v1177(VarCurr)).
% 78.27/77.60  all VarCurr (v1989(VarCurr)<->v1990(VarCurr)&v1190(VarCurr)).
% 78.27/77.60  all VarCurr (-v1990(VarCurr)<->v1241(VarCurr)).
% 78.27/77.60  all VarCurr (-v1986(VarCurr)<->v1987(VarCurr)).
% 78.27/77.60  all VarCurr (v1987(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1965(VarCurr)<->v1966(VarCurr)|v1972(VarCurr)).
% 78.27/77.60  all VarCurr (v1972(VarCurr)<->v1973(VarCurr)&v1974(VarCurr)).
% 78.27/77.60  all VarCurr (v1974(VarCurr)<->v1975(VarCurr)&v1984(VarCurr)).
% 78.27/77.60  all VarCurr (-v1984(VarCurr)<->v1177(VarCurr)).
% 78.27/77.60  all VarCurr (v1975(VarCurr)<->v1976(VarCurr)&v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1976(VarCurr)<->v24(VarCurr)|v1977(VarCurr)).
% 78.27/77.60  all VarCurr (v1977(VarCurr)<->v1978(VarCurr)&v1983(VarCurr)).
% 78.27/77.60  all VarCurr (-v1983(VarCurr)<->v24(VarCurr)).
% 78.27/77.60  all VarCurr (v1978(VarCurr)<->v1979(VarCurr)&v1982(VarCurr)).
% 78.27/77.60  all VarCurr (-v1982(VarCurr)<->v1171(VarCurr)).
% 78.27/77.60  all VarCurr (v1979(VarCurr)<->v452(VarCurr)|v1980(VarCurr)).
% 78.27/77.60  all VarCurr (v1980(VarCurr)<->v1167(VarCurr)&v1981(VarCurr)).
% 78.27/77.60  all VarCurr (-v1981(VarCurr)<->v452(VarCurr)).
% 78.27/77.60  all VarCurr (-v1973(VarCurr)<->v1158(VarCurr)).
% 78.27/77.60  all VarCurr (v1966(VarCurr)<->v1967(VarCurr)|v1968(VarCurr)).
% 78.27/77.60  all VarCurr (v1968(VarCurr)<->v1969(VarCurr)&v1970(VarCurr)).
% 78.27/77.60  all VarCurr (v1970(VarCurr)<->v1178(VarCurr)&v1971(VarCurr)).
% 78.27/77.60  all VarCurr (-v1971(VarCurr)<->v1177(VarCurr)).
% 78.27/77.60  all VarCurr (-v1969(VarCurr)<->v1158(VarCurr)).
% 78.27/77.60  all VarCurr (v1967(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.27/77.60  all VarCurr (v1139(VarCurr,bitIndex4)<->v1140(VarCurr,bitIndex4)).
% 78.27/77.60  all VarCurr (-v1959(VarCurr)-> (v1148(VarCurr)<->$F)).
% 78.27/77.60  all VarCurr (v1959(VarCurr)-> (v1148(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (-v1959(VarCurr)<->v1346(VarCurr)).
% 78.27/77.60  all VarCurr (v1124(VarCurr,bitIndex4)<->v1125(VarCurr,bitIndex4)).
% 78.27/77.60  all VarCurr (-v1956(VarCurr)-> (v1133(VarCurr)<->$F)).
% 78.27/77.60  all VarCurr (v1956(VarCurr)-> (v1133(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (-v1956(VarCurr)<->v1346(VarCurr)).
% 78.27/77.60  all VarCurr (v1109(VarCurr,bitIndex4)<->v1110(VarCurr,bitIndex4)).
% 78.27/77.60  all VarCurr (-v1953(VarCurr)-> (v1118(VarCurr)<->$F)).
% 78.27/77.60  all VarCurr (v1953(VarCurr)-> (v1118(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (-v1953(VarCurr)<->v1346(VarCurr)).
% 78.27/77.60  all VarCurr (v1094(VarCurr,bitIndex4)<->v1095(VarCurr,bitIndex4)).
% 78.27/77.60  all VarCurr (-v1950(VarCurr)-> (v1103(VarCurr)<->$F)).
% 78.27/77.60  all VarCurr (v1950(VarCurr)-> (v1103(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (-v1950(VarCurr)<->v1346(VarCurr)).
% 78.27/77.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1937(VarNext)-> (v384(VarNext,bitIndex0)<->v384(VarCurr,bitIndex0)))).
% 78.27/77.60  all VarNext (v1937(VarNext)-> (v384(VarNext,bitIndex0)<->v1945(VarNext))).
% 78.27/77.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1945(VarNext)<->v1943(VarCurr))).
% 78.27/77.60  all VarCurr (-v1217(VarCurr)-> (v1943(VarCurr)<->v386(VarCurr,bitIndex0))).
% 78.27/77.60  all VarCurr (v1217(VarCurr)-> (v1943(VarCurr)<->$T)).
% 78.27/77.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1937(VarNext)<->v1938(VarNext))).
% 78.27/77.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1938(VarNext)<->v1940(VarNext)&v355(VarNext))).
% 78.27/77.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1940(VarNext)<->v362(VarNext))).
% 78.27/77.60  all VarCurr (-v1889(VarCurr)-> (v386(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1889(VarCurr)-> (v386(VarCurr,bitIndex0)<->v1929(VarCurr))).
% 78.27/77.60  all VarCurr (-v1158(VarCurr)& -v1893(VarCurr)& -v1894(VarCurr)& -v1907(VarCurr)&v1918(VarCurr)-> (v1929(VarCurr)<->v1934(VarCurr))).
% 78.27/77.60  all VarCurr (-v1158(VarCurr)& -v1893(VarCurr)& -v1894(VarCurr)&v1907(VarCurr)-> (v1929(VarCurr)<->v1933(VarCurr))).
% 78.27/77.60  all VarCurr (-v1158(VarCurr)& -v1893(VarCurr)&v1894(VarCurr)-> (v1929(VarCurr)<->v1931(VarCurr))).
% 78.27/77.60  all VarCurr (-v1158(VarCurr)&v1893(VarCurr)-> (v1929(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v1158(VarCurr)-> (v1929(VarCurr)<->v1930(VarCurr))).
% 78.27/77.60  all VarCurr (-v24(VarCurr)-> (v1934(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v24(VarCurr)-> (v1934(VarCurr)<->v1139(VarCurr,bitIndex10))).
% 78.27/77.60  all VarCurr (-v24(VarCurr)-> (v1933(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v24(VarCurr)-> (v1933(VarCurr)<->v1124(VarCurr,bitIndex10))).
% 78.27/77.60  all VarCurr (-v24(VarCurr)-> (v1931(VarCurr)<->v1932(VarCurr))).
% 78.27/77.60  all VarCurr (v24(VarCurr)-> (v1931(VarCurr)<->v1109(VarCurr,bitIndex10))).
% 78.27/77.60  all VarCurr (-v772(VarCurr)-> (v1932(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v772(VarCurr)-> (v1932(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (-v24(VarCurr)-> (v1930(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v24(VarCurr)-> (v1930(VarCurr)<->v1094(VarCurr,bitIndex10))).
% 78.27/77.60  all VarCurr (v1889(VarCurr)<->v1890(VarCurr)|v1918(VarCurr)).
% 78.27/77.60  all VarCurr (v1918(VarCurr)<->v1919(VarCurr)&v1922(VarCurr)).
% 78.27/77.60  all VarCurr (v1922(VarCurr)<->v1923(VarCurr)&v1928(VarCurr)).
% 78.27/77.60  all VarCurr (-v1928(VarCurr)<->v1917(VarCurr)).
% 78.27/77.60  all VarCurr (v1923(VarCurr)<->v1924(VarCurr)&v1200(VarCurr)).
% 78.27/77.60  all VarCurr (v1924(VarCurr)<->v24(VarCurr)|v1925(VarCurr)).
% 78.27/77.60  all VarCurr (v1925(VarCurr)<->v1926(VarCurr)&v1927(VarCurr)).
% 78.27/77.60  all VarCurr (-v1927(VarCurr)<->v24(VarCurr)).
% 78.27/77.60  all VarCurr (-v1926(VarCurr)<->v768(VarCurr)).
% 78.27/77.60  all VarCurr (-v1919(VarCurr)<->v1920(VarCurr)).
% 78.27/77.60  all VarCurr (v1920(VarCurr)<->v1921(VarCurr)|v1187(VarCurr)).
% 78.27/77.60  all VarCurr (v1921(VarCurr)<->v1177(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1890(VarCurr)<->v1891(VarCurr)|v1907(VarCurr)).
% 78.27/77.60  all VarCurr (v1907(VarCurr)<->v1908(VarCurr)&v1910(VarCurr)).
% 78.27/77.60  all VarCurr (v1910(VarCurr)<->v1911(VarCurr)&v1916(VarCurr)).
% 78.27/77.60  all VarCurr (-v1916(VarCurr)<->v1917(VarCurr)).
% 78.27/77.60  all VarCurr (v1917(VarCurr)<->v1178(VarCurr)|v1190(VarCurr)).
% 78.27/77.60  all VarCurr (v1911(VarCurr)<->v1912(VarCurr)&v1187(VarCurr)).
% 78.27/77.60  all VarCurr (v1912(VarCurr)<->v24(VarCurr)|v1913(VarCurr)).
% 78.27/77.60  all VarCurr (v1913(VarCurr)<->v1914(VarCurr)&v1915(VarCurr)).
% 78.27/77.60  all VarCurr (-v1915(VarCurr)<->v24(VarCurr)).
% 78.27/77.60  all VarCurr (-v1914(VarCurr)<->v768(VarCurr)).
% 78.27/77.60  all VarCurr (-v1908(VarCurr)<->v1909(VarCurr)).
% 78.27/77.60  all VarCurr (v1909(VarCurr)<->v1177(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1891(VarCurr)<->v1892(VarCurr)|v1894(VarCurr)).
% 78.27/77.60  all VarCurr (v1894(VarCurr)<->v1895(VarCurr)&v1896(VarCurr)).
% 78.27/77.60  all VarCurr (v1896(VarCurr)<->v1897(VarCurr)&v1906(VarCurr)).
% 78.27/77.60  all VarCurr (-v1906(VarCurr)<->v1178(VarCurr)).
% 78.27/77.60  all VarCurr (v1897(VarCurr)<->v1898(VarCurr)&v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1898(VarCurr)<->v24(VarCurr)|v1899(VarCurr)).
% 78.27/77.60  all VarCurr (v1899(VarCurr)<->v1900(VarCurr)&v1905(VarCurr)).
% 78.27/77.60  all VarCurr (-v1905(VarCurr)<->v24(VarCurr)).
% 78.27/77.60  all VarCurr (v1900(VarCurr)<->v1901(VarCurr)&v1904(VarCurr)).
% 78.27/77.60  all VarCurr (-v1904(VarCurr)<->v1171(VarCurr)).
% 78.27/77.60  all VarCurr (v1901(VarCurr)<->v1902(VarCurr)&v1903(VarCurr)).
% 78.27/77.60  all VarCurr (-v1903(VarCurr)<->v452(VarCurr)).
% 78.27/77.60  all VarCurr (-v1902(VarCurr)<->v1167(VarCurr)).
% 78.27/77.60  all VarCurr (-v1895(VarCurr)<->v1177(VarCurr)).
% 78.27/77.60  all VarCurr (v1892(VarCurr)<->v1158(VarCurr)|v1893(VarCurr)).
% 78.27/77.60  all VarCurr (v1893(VarCurr)<->v388(VarCurr)&v1177(VarCurr)).
% 78.27/77.60  all VarCurr (v1139(VarCurr,bitIndex10)<->v1140(VarCurr,bitIndex10)).
% 78.27/77.60  all VarCurr (-v1346(VarCurr)-> (v1143(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1346(VarCurr)-> (v1143(VarCurr,bitIndex0)<->v1525(VarCurr,bitIndex0))).
% 78.27/77.60  all VarCurr (-v1628(VarCurr)-> (v1525(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1628(VarCurr)-> (v1525(VarCurr,bitIndex0)<->$T)).
% 78.27/77.60  all VarCurr (v1124(VarCurr,bitIndex10)<->v1125(VarCurr,bitIndex10)).
% 78.27/77.60  all VarCurr (-v1346(VarCurr)-> (v1128(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1346(VarCurr)-> (v1128(VarCurr,bitIndex0)<->v1518(VarCurr,bitIndex0))).
% 78.27/77.60  all VarCurr (-v1628(VarCurr)-> (v1518(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1628(VarCurr)-> (v1518(VarCurr,bitIndex0)<->$T)).
% 78.27/77.60  all VarCurr (v1109(VarCurr,bitIndex10)<->v1110(VarCurr,bitIndex10)).
% 78.27/77.60  all VarCurr (-v1346(VarCurr)-> (v1113(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1346(VarCurr)-> (v1113(VarCurr,bitIndex0)<->v1511(VarCurr,bitIndex0))).
% 78.27/77.60  all VarCurr (-v1628(VarCurr)-> (v1511(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1628(VarCurr)-> (v1511(VarCurr,bitIndex0)<->$T)).
% 78.27/77.60  all VarCurr (v1094(VarCurr,bitIndex10)<->v1095(VarCurr,bitIndex10)).
% 78.27/77.60  all VarCurr (-v1346(VarCurr)-> (v1098(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1346(VarCurr)-> (v1098(VarCurr,bitIndex0)<->v1504(VarCurr,bitIndex0))).
% 78.27/77.60  all VarCurr (-v1628(VarCurr)-> (v1504(VarCurr,bitIndex0)<->$F)).
% 78.27/77.60  all VarCurr (v1628(VarCurr)-> (v1504(VarCurr,bitIndex0)<->$T)).
% 78.27/77.60  all VarCurr (v388(VarCurr)<->v1874(VarCurr)&v390(VarCurr,bitIndex0)).
% 78.27/77.60  all VarCurr (-v1874(VarCurr)<->v1875(VarCurr)).
% 78.27/77.60  all VarCurr (v1875(VarCurr)<->v1877(VarCurr)|v390(VarCurr,bitIndex5)).
% 78.27/77.60  all VarCurr (v1877(VarCurr)<->v1878(VarCurr)|v390(VarCurr,bitIndex4)).
% 78.27/77.60  all VarCurr (v1878(VarCurr)<->v1879(VarCurr)|v390(VarCurr,bitIndex3)).
% 78.27/77.60  all VarCurr (v1879(VarCurr)<->v390(VarCurr,bitIndex1)|v390(VarCurr,bitIndex2)).
% 78.27/77.60  all VarCurr (-v1820(VarCurr)-> (v392(VarCurr)<->$F)).
% 78.27/77.60  all VarCurr (v1820(VarCurr)-> (v392(VarCurr)<->v1870(VarCurr))).
% 78.27/77.60  all VarCurr (-v1827(VarCurr)& -v1871(VarCurr)& -v1833(VarCurr)& -v1842(VarCurr)& -v1848(VarCurr)& -v1854(VarCurr)&v1861(VarCurr)-> (v1870(VarCurr)<->v1708(VarCurr))).
% 78.27/77.60  all VarCurr (-v1827(VarCurr)& -v1871(VarCurr)& -v1833(VarCurr)& -v1842(VarCurr)& -v1848(VarCurr)&v1854(VarCurr)-> (v1870(VarCurr)<->v1139(VarCurr,bitIndex2))).
% 78.27/77.60  all VarCurr (-v1827(VarCurr)& -v1871(VarCurr)& -v1833(VarCurr)& -v1842(VarCurr)&v1848(VarCurr)-> (v1870(VarCurr)<->v1124(VarCurr,bitIndex2))).
% 78.27/77.60  all VarCurr (-v1827(VarCurr)& -v1871(VarCurr)& -v1833(VarCurr)&v1842(VarCurr)-> (v1870(VarCurr)<->v1708(VarCurr))).
% 78.27/77.60  all VarCurr (-v1827(VarCurr)& -v1871(VarCurr)&v1833(VarCurr)-> (v1870(VarCurr)<->v1872(VarCurr))).
% 78.27/77.60  all VarCurr (-v1827(VarCurr)&v1871(VarCurr)-> (v1870(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v1827(VarCurr)-> (v1870(VarCurr)<->v1094(VarCurr,bitIndex2))).
% 78.27/77.60  all VarCurr (-v24(VarCurr)-> (v1872(VarCurr)<->v1708(VarCurr))).
% 78.27/77.60  all VarCurr (v24(VarCurr)-> (v1872(VarCurr)<->v1109(VarCurr,bitIndex2))).
% 78.27/77.60  all VarCurr (v1871(VarCurr)<->v1828(VarCurr)|v1830(VarCurr)).
% 78.27/77.60  all VarCurr (v1820(VarCurr)<->v1821(VarCurr)|v1861(VarCurr)).
% 78.27/77.60  all VarCurr (v1861(VarCurr)<->v1862(VarCurr)&v1868(VarCurr)).
% 78.27/77.60  all VarCurr (v1868(VarCurr)<->v1869(VarCurr)&v1266(VarCurr)).
% 78.27/77.60  all VarCurr (-v1869(VarCurr)<->v1088(VarCurr)).
% 78.27/77.60  all VarCurr (-v1862(VarCurr)<->v1863(VarCurr)).
% 78.27/77.60  all VarCurr (v1863(VarCurr)<->v1864(VarCurr)|v1200(VarCurr)).
% 78.27/77.60  all VarCurr (v1864(VarCurr)<->v1865(VarCurr)|v1187(VarCurr)).
% 78.27/77.60  all VarCurr (v1865(VarCurr)<->v1866(VarCurr)|v1190(VarCurr)).
% 78.27/77.60  all VarCurr (v1866(VarCurr)<->v1867(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1867(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.27/77.60  all VarCurr (v1821(VarCurr)<->v1822(VarCurr)|v1854(VarCurr)).
% 78.27/77.60  all VarCurr (v1854(VarCurr)<->v1855(VarCurr)&v1860(VarCurr)).
% 78.27/77.60  all VarCurr (v1860(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.27/77.60  all VarCurr (-v1855(VarCurr)<->v1856(VarCurr)).
% 78.27/77.60  all VarCurr (v1856(VarCurr)<->v1857(VarCurr)|v1187(VarCurr)).
% 78.27/77.60  all VarCurr (v1857(VarCurr)<->v1858(VarCurr)|v1190(VarCurr)).
% 78.27/77.60  all VarCurr (v1858(VarCurr)<->v1859(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1859(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.27/77.60  all VarCurr (v1822(VarCurr)<->v1823(VarCurr)|v1848(VarCurr)).
% 78.27/77.60  all VarCurr (v1848(VarCurr)<->v1849(VarCurr)&v1853(VarCurr)).
% 78.27/77.60  all VarCurr (v1853(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.27/77.60  all VarCurr (-v1849(VarCurr)<->v1850(VarCurr)).
% 78.27/77.60  all VarCurr (v1850(VarCurr)<->v1851(VarCurr)|v1190(VarCurr)).
% 78.27/77.60  all VarCurr (v1851(VarCurr)<->v1852(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1852(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.27/77.60  all VarCurr (v1823(VarCurr)<->v1824(VarCurr)|v1842(VarCurr)).
% 78.27/77.60  all VarCurr (v1842(VarCurr)<->v1843(VarCurr)&v1846(VarCurr)).
% 78.27/77.60  all VarCurr (v1846(VarCurr)<->v1847(VarCurr)&v1190(VarCurr)).
% 78.27/77.60  all VarCurr (-v1847(VarCurr)<->v1241(VarCurr)).
% 78.27/77.60  all VarCurr (-v1843(VarCurr)<->v1844(VarCurr)).
% 78.27/77.60  all VarCurr (v1844(VarCurr)<->v1845(VarCurr)|v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1845(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.27/77.60  all VarCurr (v1824(VarCurr)<->v1825(VarCurr)|v1833(VarCurr)).
% 78.27/77.60  all VarCurr (v1833(VarCurr)<->v1834(VarCurr)&v1836(VarCurr)).
% 78.27/77.60  all VarCurr (v1836(VarCurr)<->v1837(VarCurr)&v1174(VarCurr)).
% 78.27/77.60  all VarCurr (v1837(VarCurr)<->v24(VarCurr)|v1838(VarCurr)).
% 78.27/77.60  all VarCurr (v1838(VarCurr)<->v1839(VarCurr)&v1841(VarCurr)).
% 78.27/77.60  all VarCurr (-v1841(VarCurr)<->v24(VarCurr)).
% 78.27/77.60  all VarCurr (v1839(VarCurr)<->v452(VarCurr)&v1840(VarCurr)).
% 78.27/77.60  all VarCurr (-v1840(VarCurr)<->v1171(VarCurr)).
% 78.27/77.60  all VarCurr (-v1834(VarCurr)<->v1835(VarCurr)).
% 78.27/77.60  all VarCurr (v1835(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.27/77.60  all VarCurr (v1825(VarCurr)<->v1826(VarCurr)|v1830(VarCurr)).
% 78.27/77.60  all VarCurr (v1830(VarCurr)<->v1831(VarCurr)&v1832(VarCurr)).
% 78.27/77.60  all VarCurr (v1832(VarCurr)<->v395(VarCurr)&v1178(VarCurr)).
% 78.27/77.60  all VarCurr (-v1831(VarCurr)<->v1158(VarCurr)).
% 78.27/77.60  all VarCurr (v1826(VarCurr)<->v1827(VarCurr)|v1828(VarCurr)).
% 78.27/77.60  all VarCurr (v1828(VarCurr)<->v1829(VarCurr)&v1177(VarCurr)).
% 78.27/77.60  all VarCurr (-v1829(VarCurr)<->v1158(VarCurr)).
% 78.27/77.60  all VarCurr (v1827(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.27/77.60  all VarCurr (v1139(VarCurr,bitIndex2)<->v1140(VarCurr,bitIndex2)).
% 78.27/77.60  all VarCurr (-v1346(VarCurr)-> (v1150(VarCurr)<->v1816(VarCurr))).
% 78.27/77.60  all VarCurr (v1346(VarCurr)-> (v1150(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (-v395(VarCurr)-> (v1816(VarCurr)<->$F)).
% 78.27/77.60  all VarCurr (v395(VarCurr)-> (v1816(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v1124(VarCurr,bitIndex2)<->v1125(VarCurr,bitIndex2)).
% 78.27/77.60  all VarCurr (-v1346(VarCurr)-> (v1135(VarCurr)<->v1811(VarCurr))).
% 78.27/77.60  all VarCurr (v1346(VarCurr)-> (v1135(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (-v395(VarCurr)-> (v1811(VarCurr)<->$F)).
% 78.27/77.60  all VarCurr (v395(VarCurr)-> (v1811(VarCurr)<->$T)).
% 78.27/77.60  all VarCurr (v1708(VarCurr)<->v1805(VarCurr)|v390(VarCurr,bitIndex5)).
% 78.27/77.60  all VarCurr (v1805(VarCurr)<->v1806(VarCurr)|v390(VarCurr,bitIndex4)).
% 78.27/77.60  all VarCurr (v1806(VarCurr)<->v1807(VarCurr)|v390(VarCurr,bitIndex3)).
% 78.27/77.60  all VarCurr (v1807(VarCurr)<->v1808(VarCurr)|v390(VarCurr,bitIndex2)).
% 78.27/77.60  all VarCurr (v1808(VarCurr)<->v390(VarCurr,bitIndex0)|v390(VarCurr,bitIndex1)).
% 78.27/77.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1711(VarNext)-> (all B (range_5_0(B)-> (v390(VarNext,B)<->v390(VarCurr,B)))))).
% 78.27/77.60  all VarNext (v1711(VarNext)-> (all B (range_5_0(B)-> (v390(VarNext,B)<->v1728(VarNext,B))))).
% 78.27/77.60  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_5_0(B)-> (v1728(VarNext,B)<->v1726(VarCurr,B))))).
% 78.27/77.60  all VarCurr (-v1720(VarCurr)-> (all B (range_5_0(B)-> (v1726(VarCurr,B)<->v1729(VarCurr,B))))).
% 78.27/77.60  all VarCurr (v1720(VarCurr)-> (all B (range_5_0(B)-> (v1726(VarCurr,B)<->$F)))).
% 78.27/77.60  all VarCurr (-v24(VarCurr)-> (all B (range_5_0(B)-> (v1729(VarCurr,B)<->v1766(VarCurr,B))))).
% 78.27/77.60  all VarCurr (v24(VarCurr)-> (all B (range_5_0(B)-> (v1729(VarCurr,B)<->v1730(VarCurr,B))))).
% 78.27/77.60  all VarCurr (v1766(VarCurr,bitIndex0)<->v1799(VarCurr)).
% 78.27/77.61  all VarCurr (v1766(VarCurr,bitIndex1)<->v1797(VarCurr)).
% 78.27/77.61  all VarCurr (v1766(VarCurr,bitIndex2)<->v1793(VarCurr)).
% 78.27/77.61  all VarCurr (v1766(VarCurr,bitIndex3)<->v1789(VarCurr)).
% 78.27/77.61  all VarCurr (v1766(VarCurr,bitIndex4)<->v1785(VarCurr)).
% 78.27/77.61  all VarCurr (v1766(VarCurr,bitIndex5)<->v1768(VarCurr)).
% 78.27/77.61  all VarCurr (v1797(VarCurr)<->v1798(VarCurr)&v1800(VarCurr)).
% 78.27/77.61  all VarCurr (v1800(VarCurr)<->v390(VarCurr,bitIndex0)|v1779(VarCurr)).
% 78.27/77.61  all VarCurr (v1798(VarCurr)<->v1799(VarCurr)|v390(VarCurr,bitIndex1)).
% 78.27/77.61  all VarCurr (-v1799(VarCurr)<->v390(VarCurr,bitIndex0)).
% 78.27/77.61  all VarCurr (v1793(VarCurr)<->v1794(VarCurr)&v1796(VarCurr)).
% 78.27/77.61  all VarCurr (v1796(VarCurr)<->v1777(VarCurr)|v1780(VarCurr)).
% 78.27/77.61  all VarCurr (v1794(VarCurr)<->v1795(VarCurr)|v390(VarCurr,bitIndex2)).
% 78.27/77.61  all VarCurr (-v1795(VarCurr)<->v1777(VarCurr)).
% 78.27/77.61  all VarCurr (v1789(VarCurr)<->v1790(VarCurr)&v1792(VarCurr)).
% 78.27/77.61  all VarCurr (v1792(VarCurr)<->v1775(VarCurr)|v1781(VarCurr)).
% 78.27/77.61  all VarCurr (v1790(VarCurr)<->v1791(VarCurr)|v390(VarCurr,bitIndex3)).
% 78.27/77.61  all VarCurr (-v1791(VarCurr)<->v1775(VarCurr)).
% 78.27/77.61  all VarCurr (v1785(VarCurr)<->v1786(VarCurr)&v1788(VarCurr)).
% 78.27/77.61  all VarCurr (v1788(VarCurr)<->v1773(VarCurr)|v1782(VarCurr)).
% 78.27/77.61  all VarCurr (v1786(VarCurr)<->v1787(VarCurr)|v390(VarCurr,bitIndex4)).
% 78.27/77.61  all VarCurr (-v1787(VarCurr)<->v1773(VarCurr)).
% 78.27/77.61  all VarCurr (v1768(VarCurr)<->v1769(VarCurr)&v1783(VarCurr)).
% 78.27/77.61  all VarCurr (v1783(VarCurr)<->v1771(VarCurr)|v1784(VarCurr)).
% 78.27/77.61  all VarCurr (-v1784(VarCurr)<->v390(VarCurr,bitIndex5)).
% 78.27/77.61  all VarCurr (v1769(VarCurr)<->v1770(VarCurr)|v390(VarCurr,bitIndex5)).
% 78.27/77.61  all VarCurr (-v1770(VarCurr)<->v1771(VarCurr)).
% 78.27/77.61  all VarCurr (v1771(VarCurr)<->v390(VarCurr,bitIndex4)|v1772(VarCurr)).
% 78.27/77.61  all VarCurr (v1772(VarCurr)<->v1773(VarCurr)&v1782(VarCurr)).
% 78.27/77.61  all VarCurr (-v1782(VarCurr)<->v390(VarCurr,bitIndex4)).
% 78.27/77.61  all VarCurr (v1773(VarCurr)<->v390(VarCurr,bitIndex3)|v1774(VarCurr)).
% 78.27/77.61  all VarCurr (v1774(VarCurr)<->v1775(VarCurr)&v1781(VarCurr)).
% 78.27/77.61  all VarCurr (-v1781(VarCurr)<->v390(VarCurr,bitIndex3)).
% 78.27/77.61  all VarCurr (v1775(VarCurr)<->v390(VarCurr,bitIndex2)|v1776(VarCurr)).
% 78.27/77.61  all VarCurr (v1776(VarCurr)<->v1777(VarCurr)&v1780(VarCurr)).
% 78.27/77.61  all VarCurr (-v1780(VarCurr)<->v390(VarCurr,bitIndex2)).
% 78.27/77.61  all VarCurr (v1777(VarCurr)<->v390(VarCurr,bitIndex1)|v1778(VarCurr)).
% 78.27/77.61  all VarCurr (v1778(VarCurr)<->v390(VarCurr,bitIndex0)&v1779(VarCurr)).
% 78.27/77.61  all VarCurr (-v1779(VarCurr)<->v390(VarCurr,bitIndex1)).
% 78.27/77.61  all VarCurr (-v392(VarCurr)-> (all B (range_5_0(B)-> (v1730(VarCurr,B)<->v1630(VarCurr,B))))).
% 78.27/77.61  all VarCurr (v392(VarCurr)-> (all B (range_5_0(B)-> (v1730(VarCurr,B)<->v1731(VarCurr,B))))).
% 78.27/77.61  all VarCurr (v1731(VarCurr,bitIndex0)<->v1764(VarCurr)).
% 78.27/77.61  all VarCurr (v1731(VarCurr,bitIndex1)<->v1762(VarCurr)).
% 78.27/77.61  all VarCurr (v1731(VarCurr,bitIndex2)<->v1758(VarCurr)).
% 78.27/77.61  all VarCurr (v1731(VarCurr,bitIndex3)<->v1754(VarCurr)).
% 78.27/77.61  all VarCurr (v1731(VarCurr,bitIndex4)<->v1750(VarCurr)).
% 78.27/77.61  all VarCurr (v1731(VarCurr,bitIndex5)<->v1733(VarCurr)).
% 78.27/77.61  all VarCurr (v1762(VarCurr)<->v1763(VarCurr)&v1765(VarCurr)).
% 78.27/77.61  all VarCurr (v1765(VarCurr)<->v1747(VarCurr)|v1630(VarCurr,bitIndex0)).
% 78.27/77.61  all VarCurr (v1763(VarCurr)<->v1630(VarCurr,bitIndex1)|v1764(VarCurr)).
% 78.27/77.61  all VarCurr (-v1764(VarCurr)<->v1630(VarCurr,bitIndex0)).
% 78.27/77.61  all VarCurr (v1758(VarCurr)<->v1759(VarCurr)&v1761(VarCurr)).
% 78.27/77.61  all VarCurr (v1761(VarCurr)<->v1744(VarCurr)|v1745(VarCurr)).
% 78.27/77.61  all VarCurr (v1759(VarCurr)<->v1630(VarCurr,bitIndex2)|v1760(VarCurr)).
% 78.27/77.61  all VarCurr (-v1760(VarCurr)<->v1745(VarCurr)).
% 78.27/77.61  all VarCurr (v1754(VarCurr)<->v1755(VarCurr)&v1757(VarCurr)).
% 78.27/77.61  all VarCurr (v1757(VarCurr)<->v1741(VarCurr)|v1742(VarCurr)).
% 78.27/77.61  all VarCurr (v1755(VarCurr)<->v1630(VarCurr,bitIndex3)|v1756(VarCurr)).
% 78.27/77.61  all VarCurr (-v1756(VarCurr)<->v1742(VarCurr)).
% 78.27/77.61  all VarCurr (v1750(VarCurr)<->v1751(VarCurr)&v1753(VarCurr)).
% 78.27/77.61  all VarCurr (v1753(VarCurr)<->v1738(VarCurr)|v1739(VarCurr)).
% 78.27/77.61  all VarCurr (v1751(VarCurr)<->v1630(VarCurr,bitIndex4)|v1752(VarCurr)).
% 78.27/77.61  all VarCurr (-v1752(VarCurr)<->v1739(VarCurr)).
% 78.27/77.61  all VarCurr (v1733(VarCurr)<->v1734(VarCurr)&v1748(VarCurr)).
% 78.27/77.61  all VarCurr (v1748(VarCurr)<->v1749(VarCurr)|v1736(VarCurr)).
% 78.27/77.61  all VarCurr (-v1749(VarCurr)<->v1630(VarCurr,bitIndex5)).
% 78.27/77.61  all VarCurr (v1734(VarCurr)<->v1630(VarCurr,bitIndex5)|v1735(VarCurr)).
% 78.28/77.61  all VarCurr (-v1735(VarCurr)<->v1736(VarCurr)).
% 78.28/77.61  all VarCurr (v1736(VarCurr)<->v1630(VarCurr,bitIndex4)|v1737(VarCurr)).
% 78.28/77.61  all VarCurr (v1737(VarCurr)<->v1738(VarCurr)&v1739(VarCurr)).
% 78.28/77.61  all VarCurr (v1739(VarCurr)<->v1630(VarCurr,bitIndex3)|v1740(VarCurr)).
% 78.28/77.61  all VarCurr (v1740(VarCurr)<->v1741(VarCurr)&v1742(VarCurr)).
% 78.28/77.61  all VarCurr (v1742(VarCurr)<->v1630(VarCurr,bitIndex2)|v1743(VarCurr)).
% 78.28/77.61  all VarCurr (v1743(VarCurr)<->v1744(VarCurr)&v1745(VarCurr)).
% 78.28/77.61  all VarCurr (v1745(VarCurr)<->v1630(VarCurr,bitIndex1)|v1746(VarCurr)).
% 78.28/77.61  all VarCurr (v1746(VarCurr)<->v1747(VarCurr)&v1630(VarCurr,bitIndex0)).
% 78.28/77.61  all VarCurr (-v1747(VarCurr)<->v1630(VarCurr,bitIndex1)).
% 78.28/77.61  all VarCurr (-v1744(VarCurr)<->v1630(VarCurr,bitIndex2)).
% 78.28/77.61  all VarCurr (-v1741(VarCurr)<->v1630(VarCurr,bitIndex3)).
% 78.28/77.61  all VarCurr (-v1738(VarCurr)<->v1630(VarCurr,bitIndex4)).
% 78.28/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1711(VarNext)<->v1712(VarNext)&v1719(VarNext))).
% 78.28/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1719(VarNext)<->v1717(VarCurr))).
% 78.28/77.61  all VarCurr (v1717(VarCurr)<->v1720(VarCurr)|v1721(VarCurr)).
% 78.28/77.61  all VarCurr (v1721(VarCurr)<->v1722(VarCurr)&v1725(VarCurr)).
% 78.28/77.61  all VarCurr (-v1725(VarCurr)<->v1720(VarCurr)).
% 78.28/77.61  all VarCurr (v1722(VarCurr)<->v24(VarCurr)|v1723(VarCurr)).
% 78.28/77.61  all VarCurr (v1723(VarCurr)<->v392(VarCurr)&v1724(VarCurr)).
% 78.28/77.61  all VarCurr (-v1724(VarCurr)<->v24(VarCurr)).
% 78.28/77.61  all VarCurr (-v1720(VarCurr)<->v15(VarCurr)).
% 78.28/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1712(VarNext)<->v1713(VarNext)&v355(VarNext))).
% 78.28/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1713(VarNext)<->v362(VarNext))).
% 78.28/77.61  all B (range_5_0(B)-> (v390(constB0,B)<->$F)).
% 78.28/77.61  all VarCurr (v1109(VarCurr,bitIndex2)<->v1110(VarCurr,bitIndex2)).
% 78.28/77.61  all VarCurr (-v1346(VarCurr)-> (v1120(VarCurr)<->v1704(VarCurr))).
% 78.28/77.61  all VarCurr (v1346(VarCurr)-> (v1120(VarCurr)<->$T)).
% 78.28/77.61  all VarCurr (-v395(VarCurr)-> (v1704(VarCurr)<->$F)).
% 78.28/77.61  all VarCurr (v395(VarCurr)-> (v1704(VarCurr)<->$T)).
% 78.28/77.61  all VarCurr (v1094(VarCurr,bitIndex2)<->v1095(VarCurr,bitIndex2)).
% 78.28/77.61  all VarCurr (-v1346(VarCurr)-> (v1105(VarCurr)<->v1699(VarCurr))).
% 78.28/77.61  all VarCurr (v1346(VarCurr)-> (v1105(VarCurr)<->$T)).
% 78.28/77.61  all VarCurr (-v395(VarCurr)-> (v1699(VarCurr)<->$F)).
% 78.28/77.61  all VarCurr (v395(VarCurr)-> (v1699(VarCurr)<->$T)).
% 78.28/77.61  all VarNext (v384(VarNext,bitIndex1)<->v1690(VarNext,bitIndex0)).
% 78.28/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1691(VarNext)-> (v1690(VarNext,bitIndex6)<->v384(VarCurr,bitIndex7))& (v1690(VarNext,bitIndex5)<->v384(VarCurr,bitIndex6))& (v1690(VarNext,bitIndex4)<->v384(VarCurr,bitIndex5))& (v1690(VarNext,bitIndex3)<->v384(VarCurr,bitIndex4))& (v1690(VarNext,bitIndex2)<->v384(VarCurr,bitIndex3))& (v1690(VarNext,bitIndex1)<->v384(VarCurr,bitIndex2))& (v1690(VarNext,bitIndex0)<->v384(VarCurr,bitIndex1)))).
% 78.28/77.61  all VarNext (v1691(VarNext)-> (all B (range_6_0(B)-> (v1690(VarNext,B)<->v1216(VarNext,B))))).
% 78.28/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1691(VarNext)<->v1692(VarNext))).
% 78.28/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1692(VarNext)<->v1694(VarNext)&v355(VarNext))).
% 78.28/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1694(VarNext)<->v362(VarNext))).
% 78.28/77.61  all VarCurr (-v1657(VarCurr)-> (v386(VarCurr,bitIndex1)<->$F)).
% 78.28/77.61  all VarCurr (v1657(VarCurr)-> (v386(VarCurr,bitIndex1)<->v1688(VarCurr))).
% 78.28/77.61  all VarCurr (-v1661(VarCurr)& -v1662(VarCurr)& -v1666(VarCurr)& -v1672(VarCurr)&v1680(VarCurr)-> (v1688(VarCurr)<->v1139(VarCurr,bitIndex11))).
% 78.28/77.61  all VarCurr (-v1661(VarCurr)& -v1662(VarCurr)& -v1666(VarCurr)&v1672(VarCurr)-> (v1688(VarCurr)<->v1124(VarCurr,bitIndex11))).
% 78.28/77.61  all VarCurr (-v1661(VarCurr)& -v1662(VarCurr)&v1666(VarCurr)-> (v1688(VarCurr)<->v1109(VarCurr,bitIndex11))).
% 78.28/77.61  all VarCurr (-v1661(VarCurr)&v1662(VarCurr)-> (v1688(VarCurr)<->$T)).
% 78.28/77.61  all VarCurr (v1661(VarCurr)-> (v1688(VarCurr)<->v1094(VarCurr,bitIndex11))).
% 78.28/77.61  all VarCurr (v1657(VarCurr)<->v1658(VarCurr)|v1680(VarCurr)).
% 78.28/77.61  all VarCurr (v1680(VarCurr)<->v1681(VarCurr)&v1685(VarCurr)).
% 78.28/77.61  all VarCurr (v1685(VarCurr)<->v1686(VarCurr)&v1687(VarCurr)).
% 78.28/77.61  all VarCurr (-v1687(VarCurr)<->v1679(VarCurr)).
% 78.28/77.61  all VarCurr (v1686(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.29/77.61  all VarCurr (-v1681(VarCurr)<->v1682(VarCurr)).
% 78.29/77.61  all VarCurr (v1682(VarCurr)<->v1683(VarCurr)|v1187(VarCurr)).
% 78.29/77.61  all VarCurr (v1683(VarCurr)<->v1684(VarCurr)|v1174(VarCurr)).
% 78.29/77.61  all VarCurr (v1684(VarCurr)<->v1158(VarCurr)|v1177(VarCurr)).
% 78.29/77.61  all VarCurr (v1658(VarCurr)<->v1659(VarCurr)|v1672(VarCurr)).
% 78.29/77.61  all VarCurr (v1672(VarCurr)<->v1673(VarCurr)&v1676(VarCurr)).
% 78.29/77.61  all VarCurr (v1676(VarCurr)<->v1677(VarCurr)&v1678(VarCurr)).
% 78.29/77.61  all VarCurr (-v1678(VarCurr)<->v1679(VarCurr)).
% 78.29/77.61  all VarCurr (v1679(VarCurr)<->v1178(VarCurr)|v1190(VarCurr)).
% 78.29/77.61  all VarCurr (v1677(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.29/77.61  all VarCurr (-v1673(VarCurr)<->v1674(VarCurr)).
% 78.29/77.61  all VarCurr (v1674(VarCurr)<->v1675(VarCurr)|v1174(VarCurr)).
% 78.29/77.61  all VarCurr (v1675(VarCurr)<->v1158(VarCurr)|v1177(VarCurr)).
% 78.29/77.61  all VarCurr (v1659(VarCurr)<->v1660(VarCurr)|v1666(VarCurr)).
% 78.29/77.61  all VarCurr (v1666(VarCurr)<->v1667(VarCurr)&v1669(VarCurr)).
% 78.29/77.61  all VarCurr (v1669(VarCurr)<->v1670(VarCurr)&v1671(VarCurr)).
% 78.29/77.61  all VarCurr (-v1671(VarCurr)<->v1178(VarCurr)).
% 78.29/77.61  all VarCurr (v1670(VarCurr)<->v24(VarCurr)&v1174(VarCurr)).
% 78.29/77.61  all VarCurr (-v1667(VarCurr)<->v1668(VarCurr)).
% 78.29/77.61  all VarCurr (v1668(VarCurr)<->v1158(VarCurr)|v1177(VarCurr)).
% 78.29/77.61  all VarCurr (v1660(VarCurr)<->v1661(VarCurr)|v1662(VarCurr)).
% 78.29/77.61  all VarCurr (v1662(VarCurr)<->v1663(VarCurr)&v1664(VarCurr)).
% 78.29/77.61  all VarCurr (v1664(VarCurr)<->v1665(VarCurr)&v1177(VarCurr)).
% 78.29/77.61  all VarCurr (-v1665(VarCurr)<->v388(VarCurr)).
% 78.29/77.61  all VarCurr (-v1663(VarCurr)<->v1158(VarCurr)).
% 78.29/77.61  all VarCurr (v1661(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.29/77.61  all VarCurr (v1139(VarCurr,bitIndex11)<->v1140(VarCurr,bitIndex11)).
% 78.29/77.61  all VarCurr (-v1346(VarCurr)-> (v1143(VarCurr,bitIndex1)<->$F)).
% 78.29/77.61  all VarCurr (v1346(VarCurr)-> (v1143(VarCurr,bitIndex1)<->v1525(VarCurr,bitIndex1))).
% 78.29/77.61  all VarCurr (-v1654(VarCurr)-> (v1525(VarCurr,bitIndex1)<->$F)).
% 78.29/77.61  all VarCurr (v1654(VarCurr)-> (v1525(VarCurr,bitIndex1)<->$T)).
% 78.29/77.61  all VarCurr (-v1654(VarCurr)<->v1628(VarCurr)).
% 78.29/77.61  all VarCurr (v1124(VarCurr,bitIndex11)<->v1125(VarCurr,bitIndex11)).
% 78.29/77.61  all VarCurr (-v1346(VarCurr)-> (v1128(VarCurr,bitIndex1)<->$F)).
% 78.29/77.61  all VarCurr (v1346(VarCurr)-> (v1128(VarCurr,bitIndex1)<->v1518(VarCurr,bitIndex1))).
% 78.29/77.61  all VarCurr (-v1651(VarCurr)-> (v1518(VarCurr,bitIndex1)<->$F)).
% 78.29/77.61  all VarCurr (v1651(VarCurr)-> (v1518(VarCurr,bitIndex1)<->$T)).
% 78.29/77.61  all VarCurr (-v1651(VarCurr)<->v1628(VarCurr)).
% 78.29/77.61  all VarCurr (v1109(VarCurr,bitIndex11)<->v1110(VarCurr,bitIndex11)).
% 78.29/77.61  all VarCurr (-v1346(VarCurr)-> (v1113(VarCurr,bitIndex1)<->$F)).
% 78.29/77.61  all VarCurr (v1346(VarCurr)-> (v1113(VarCurr,bitIndex1)<->v1511(VarCurr,bitIndex1))).
% 78.29/77.61  all VarCurr (-v1648(VarCurr)-> (v1511(VarCurr,bitIndex1)<->$F)).
% 78.29/77.61  all VarCurr (v1648(VarCurr)-> (v1511(VarCurr,bitIndex1)<->$T)).
% 78.29/77.61  all VarCurr (-v1648(VarCurr)<->v1628(VarCurr)).
% 78.29/77.61  all VarCurr (v1094(VarCurr,bitIndex11)<->v1095(VarCurr,bitIndex11)).
% 78.29/77.61  all VarCurr (-v1346(VarCurr)-> (v1098(VarCurr,bitIndex1)<->$F)).
% 78.29/77.61  all VarCurr (v1346(VarCurr)-> (v1098(VarCurr,bitIndex1)<->v1504(VarCurr,bitIndex1))).
% 78.29/77.61  all VarCurr (-v1645(VarCurr)-> (v1504(VarCurr,bitIndex1)<->$F)).
% 78.29/77.61  all VarCurr (v1645(VarCurr)-> (v1504(VarCurr,bitIndex1)<->$T)).
% 78.29/77.61  all VarCurr (-v1645(VarCurr)<->v1628(VarCurr)).
% 78.29/77.61  all VarCurr (v1628(VarCurr)<->v1638(VarCurr)&v1630(VarCurr,bitIndex0)).
% 78.29/77.61  all VarCurr (-v1638(VarCurr)<->v1639(VarCurr)).
% 78.29/77.61  all VarCurr (v1639(VarCurr)<->v1641(VarCurr)|v1630(VarCurr,bitIndex5)).
% 78.29/77.61  all VarCurr (v1641(VarCurr)<->v1642(VarCurr)|v1630(VarCurr,bitIndex4)).
% 78.29/77.61  all VarCurr (v1642(VarCurr)<->v1643(VarCurr)|v1630(VarCurr,bitIndex3)).
% 78.29/77.61  all VarCurr (v1643(VarCurr)<->v1630(VarCurr,bitIndex1)|v1630(VarCurr,bitIndex2)).
% 78.29/77.61  all VarCurr (v1630(VarCurr,bitIndex0)<->v1635(VarCurr,bitIndex0)).
% 78.29/77.61  all VarCurr B (range_5_1(B)-> (v1630(VarCurr,B)<->v1635(VarCurr,B))).
% 78.29/77.61  all B (range_5_1(B)<->bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B).
% 78.29/77.61  all VarCurr (-v1632(VarCurr)-> (v1635(VarCurr,bitIndex5)<->v460(VarCurr,bitIndex7))& (v1635(VarCurr,bitIndex4)<->v460(VarCurr,bitIndex6))& (v1635(VarCurr,bitIndex3)<->v460(VarCurr,bitIndex5))& (v1635(VarCurr,bitIndex2)<->v460(VarCurr,bitIndex4))& (v1635(VarCurr,bitIndex1)<->v460(VarCurr,bitIndex3))& (v1635(VarCurr,bitIndex0)<->v460(VarCurr,bitIndex2))).
% 78.29/77.61  all VarCurr (v1632(VarCurr)-> (all B (range_5_0(B)-> (v1635(VarCurr,B)<->v1636(VarCurr,B))))).
% 78.29/77.61  all VarCurr (v1636(VarCurr,bitIndex0)<->v632(VarCurr)).
% 78.29/77.61  all VarCurr (v1636(VarCurr,bitIndex1)<->v630(VarCurr)).
% 78.29/77.61  all VarCurr (v1636(VarCurr,bitIndex2)<->v625(VarCurr)).
% 78.29/77.61  all VarCurr (v1636(VarCurr,bitIndex3)<->v620(VarCurr)).
% 78.29/77.61  all VarCurr (v1636(VarCurr,bitIndex4)<->v615(VarCurr)).
% 78.29/77.61  all VarCurr (v1636(VarCurr,bitIndex5)<->v606(VarCurr)).
% 78.29/77.61  all VarCurr (v1632(VarCurr)<->v460(VarCurr,bitIndex0)|v460(VarCurr,bitIndex1)).
% 78.29/77.61  all VarNext (v384(VarNext,bitIndex3)<->v1620(VarNext,bitIndex2)).
% 78.29/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1621(VarNext)-> (v1620(VarNext,bitIndex6)<->v384(VarCurr,bitIndex7))& (v1620(VarNext,bitIndex5)<->v384(VarCurr,bitIndex6))& (v1620(VarNext,bitIndex4)<->v384(VarCurr,bitIndex5))& (v1620(VarNext,bitIndex3)<->v384(VarCurr,bitIndex4))& (v1620(VarNext,bitIndex2)<->v384(VarCurr,bitIndex3))& (v1620(VarNext,bitIndex1)<->v384(VarCurr,bitIndex2))& (v1620(VarNext,bitIndex0)<->v384(VarCurr,bitIndex1)))).
% 78.29/77.61  all VarNext (v1621(VarNext)-> (all B (range_6_0(B)-> (v1620(VarNext,B)<->v1216(VarNext,B))))).
% 78.29/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1621(VarNext)<->v1622(VarNext))).
% 78.29/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1622(VarNext)<->v1624(VarNext)&v355(VarNext))).
% 78.29/77.61  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1624(VarNext)<->v362(VarNext))).
% 78.29/77.61  all VarCurr (-v1586(VarCurr)-> (v386(VarCurr,bitIndex3)<->$F)).
% 78.29/77.61  all VarCurr (v1586(VarCurr)-> (v386(VarCurr,bitIndex3)<->v1618(VarCurr))).
% 78.29/77.61  all VarCurr (-v1590(VarCurr)& -v1591(VarCurr)& -v1596(VarCurr)& -v1602(VarCurr)&v1610(VarCurr)-> (v1618(VarCurr)<->v1139(VarCurr,bitIndex13))).
% 78.29/77.61  all VarCurr (-v1590(VarCurr)& -v1591(VarCurr)& -v1596(VarCurr)&v1602(VarCurr)-> (v1618(VarCurr)<->v1124(VarCurr,bitIndex13))).
% 78.29/77.61  all VarCurr (-v1590(VarCurr)& -v1591(VarCurr)&v1596(VarCurr)-> (v1618(VarCurr)<->v1109(VarCurr,bitIndex13))).
% 78.29/77.61  all VarCurr (-v1590(VarCurr)&v1591(VarCurr)-> (v1618(VarCurr)<->$T)).
% 78.29/77.61  all VarCurr (v1590(VarCurr)-> (v1618(VarCurr)<->v1094(VarCurr,bitIndex13))).
% 78.29/77.61  all VarCurr (v1586(VarCurr)<->v1587(VarCurr)|v1610(VarCurr)).
% 78.29/77.61  all VarCurr (v1610(VarCurr)<->v1611(VarCurr)&v1615(VarCurr)).
% 78.29/77.61  all VarCurr (v1615(VarCurr)<->v1616(VarCurr)&v1617(VarCurr)).
% 78.29/77.61  all VarCurr (-v1617(VarCurr)<->v1609(VarCurr)).
% 78.29/77.61  all VarCurr (v1616(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.29/77.61  all VarCurr (-v1611(VarCurr)<->v1612(VarCurr)).
% 78.29/77.61  all VarCurr (v1612(VarCurr)<->v1613(VarCurr)|v1187(VarCurr)).
% 78.29/77.61  all VarCurr (v1613(VarCurr)<->v1614(VarCurr)|v1174(VarCurr)).
% 78.29/77.61  all VarCurr (v1614(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.61  all VarCurr (v1587(VarCurr)<->v1588(VarCurr)|v1602(VarCurr)).
% 78.29/77.61  all VarCurr (v1602(VarCurr)<->v1603(VarCurr)&v1606(VarCurr)).
% 78.29/77.61  all VarCurr (v1606(VarCurr)<->v1607(VarCurr)&v1608(VarCurr)).
% 78.29/77.61  all VarCurr (-v1608(VarCurr)<->v1609(VarCurr)).
% 78.29/77.61  all VarCurr (v1609(VarCurr)<->v1177(VarCurr)|v1190(VarCurr)).
% 78.29/77.61  all VarCurr (v1607(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.29/77.61  all VarCurr (-v1603(VarCurr)<->v1604(VarCurr)).
% 78.29/77.61  all VarCurr (v1604(VarCurr)<->v1605(VarCurr)|v1174(VarCurr)).
% 78.29/77.61  all VarCurr (v1605(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.61  all VarCurr (v1588(VarCurr)<->v1589(VarCurr)|v1596(VarCurr)).
% 78.29/77.61  all VarCurr (v1596(VarCurr)<->v1597(VarCurr)&v1599(VarCurr)).
% 78.29/77.61  all VarCurr (v1599(VarCurr)<->v1600(VarCurr)&v1601(VarCurr)).
% 78.29/77.61  all VarCurr (-v1601(VarCurr)<->v1177(VarCurr)).
% 78.29/77.61  all VarCurr (v1600(VarCurr)<->v24(VarCurr)&v1174(VarCurr)).
% 78.29/77.61  all VarCurr (-v1597(VarCurr)<->v1598(VarCurr)).
% 78.29/77.61  all VarCurr (v1598(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.61  all VarCurr (v1589(VarCurr)<->v1590(VarCurr)|v1591(VarCurr)).
% 78.29/77.61  all VarCurr (v1591(VarCurr)<->v1592(VarCurr)&v1593(VarCurr)).
% 78.29/77.61  all VarCurr (v1593(VarCurr)<->v1594(VarCurr)&v1595(VarCurr)).
% 78.29/77.61  all VarCurr (-v1595(VarCurr)<->v1177(VarCurr)).
% 78.29/77.61  all VarCurr (v1594(VarCurr)<->v395(VarCurr)&v1178(VarCurr)).
% 78.29/77.61  all VarCurr (-v1592(VarCurr)<->v1158(VarCurr)).
% 78.29/77.61  all VarCurr (v1590(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.29/77.61  all VarCurr (v1139(VarCurr,bitIndex13)<->v1140(VarCurr,bitIndex13)).
% 78.29/77.61  all VarCurr (-v1584(VarCurr)-> (v1143(VarCurr,bitIndex3)<->$F)).
% 78.29/77.61  all VarCurr (v1584(VarCurr)-> (v1143(VarCurr,bitIndex3)<->v1525(VarCurr,bitIndex3))).
% 78.29/77.62  all VarCurr (-v1584(VarCurr)<->v1346(VarCurr)).
% 78.29/77.62  all VarCurr (-v395(VarCurr)-> (v1525(VarCurr,bitIndex3)<->$F)).
% 78.29/77.62  all VarCurr (v395(VarCurr)-> (v1525(VarCurr,bitIndex3)<->$T)).
% 78.29/77.62  all VarCurr (v1124(VarCurr,bitIndex13)<->v1125(VarCurr,bitIndex13)).
% 78.29/77.62  all VarCurr (-v1581(VarCurr)-> (v1128(VarCurr,bitIndex3)<->$F)).
% 78.29/77.62  all VarCurr (v1581(VarCurr)-> (v1128(VarCurr,bitIndex3)<->v1518(VarCurr,bitIndex3))).
% 78.29/77.62  all VarCurr (-v1581(VarCurr)<->v1346(VarCurr)).
% 78.29/77.62  all VarCurr (-v395(VarCurr)-> (v1518(VarCurr,bitIndex3)<->$F)).
% 78.29/77.62  all VarCurr (v395(VarCurr)-> (v1518(VarCurr,bitIndex3)<->$T)).
% 78.29/77.62  all VarCurr (v1109(VarCurr,bitIndex13)<->v1110(VarCurr,bitIndex13)).
% 78.29/77.62  all VarCurr (-v1578(VarCurr)-> (v1113(VarCurr,bitIndex3)<->$F)).
% 78.29/77.62  all VarCurr (v1578(VarCurr)-> (v1113(VarCurr,bitIndex3)<->v1511(VarCurr,bitIndex3))).
% 78.29/77.62  all VarCurr (-v1578(VarCurr)<->v1346(VarCurr)).
% 78.29/77.62  all VarCurr (-v395(VarCurr)-> (v1511(VarCurr,bitIndex3)<->$F)).
% 78.29/77.62  all VarCurr (v395(VarCurr)-> (v1511(VarCurr,bitIndex3)<->$T)).
% 78.29/77.62  all VarCurr (v1094(VarCurr,bitIndex13)<->v1095(VarCurr,bitIndex13)).
% 78.29/77.62  all VarCurr (-v1575(VarCurr)-> (v1098(VarCurr,bitIndex3)<->$F)).
% 78.29/77.62  all VarCurr (v1575(VarCurr)-> (v1098(VarCurr,bitIndex3)<->v1504(VarCurr,bitIndex3))).
% 78.29/77.62  all VarCurr (-v1575(VarCurr)<->v1346(VarCurr)).
% 78.29/77.62  all VarCurr (-v395(VarCurr)-> (v1504(VarCurr,bitIndex3)<->$F)).
% 78.29/77.62  all VarCurr (v395(VarCurr)-> (v1504(VarCurr,bitIndex3)<->$T)).
% 78.29/77.62  all VarNext (v384(VarNext,bitIndex2)<->v1566(VarNext,bitIndex1)).
% 78.29/77.62  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1567(VarNext)-> (v1566(VarNext,bitIndex6)<->v384(VarCurr,bitIndex7))& (v1566(VarNext,bitIndex5)<->v384(VarCurr,bitIndex6))& (v1566(VarNext,bitIndex4)<->v384(VarCurr,bitIndex5))& (v1566(VarNext,bitIndex3)<->v384(VarCurr,bitIndex4))& (v1566(VarNext,bitIndex2)<->v384(VarCurr,bitIndex3))& (v1566(VarNext,bitIndex1)<->v384(VarCurr,bitIndex2))& (v1566(VarNext,bitIndex0)<->v384(VarCurr,bitIndex1)))).
% 78.29/77.62  all VarNext (v1567(VarNext)-> (all B (range_6_0(B)-> (v1566(VarNext,B)<->v1216(VarNext,B))))).
% 78.29/77.62  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1567(VarNext)<->v1568(VarNext))).
% 78.29/77.62  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1568(VarNext)<->v1570(VarNext)&v355(VarNext))).
% 78.29/77.62  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1570(VarNext)<->v362(VarNext))).
% 78.29/77.62  all VarCurr (-v1531(VarCurr)-> (v386(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1531(VarCurr)-> (v386(VarCurr,bitIndex2)<->v1564(VarCurr))).
% 78.29/77.62  all VarCurr (-v1535(VarCurr)& -v1536(VarCurr)& -v1542(VarCurr)& -v1548(VarCurr)&v1556(VarCurr)-> (v1564(VarCurr)<->v1139(VarCurr,bitIndex12))).
% 78.29/77.62  all VarCurr (-v1535(VarCurr)& -v1536(VarCurr)& -v1542(VarCurr)&v1548(VarCurr)-> (v1564(VarCurr)<->v1124(VarCurr,bitIndex12))).
% 78.29/77.62  all VarCurr (-v1535(VarCurr)& -v1536(VarCurr)&v1542(VarCurr)-> (v1564(VarCurr)<->v1109(VarCurr,bitIndex12))).
% 78.29/77.62  all VarCurr (-v1535(VarCurr)&v1536(VarCurr)-> (v1564(VarCurr)<->$T)).
% 78.29/77.62  all VarCurr (v1535(VarCurr)-> (v1564(VarCurr)<->v1094(VarCurr,bitIndex12))).
% 78.29/77.62  all VarCurr (v1531(VarCurr)<->v1532(VarCurr)|v1556(VarCurr)).
% 78.29/77.62  all VarCurr (v1556(VarCurr)<->v1557(VarCurr)&v1561(VarCurr)).
% 78.29/77.62  all VarCurr (v1561(VarCurr)<->v1562(VarCurr)&v1563(VarCurr)).
% 78.29/77.62  all VarCurr (-v1563(VarCurr)<->v1555(VarCurr)).
% 78.29/77.62  all VarCurr (v1562(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.29/77.62  all VarCurr (-v1557(VarCurr)<->v1558(VarCurr)).
% 78.29/77.62  all VarCurr (v1558(VarCurr)<->v1559(VarCurr)|v1187(VarCurr)).
% 78.29/77.62  all VarCurr (v1559(VarCurr)<->v1560(VarCurr)|v1174(VarCurr)).
% 78.29/77.62  all VarCurr (v1560(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.62  all VarCurr (v1532(VarCurr)<->v1533(VarCurr)|v1548(VarCurr)).
% 78.29/77.62  all VarCurr (v1548(VarCurr)<->v1549(VarCurr)&v1552(VarCurr)).
% 78.29/77.62  all VarCurr (v1552(VarCurr)<->v1553(VarCurr)&v1554(VarCurr)).
% 78.29/77.62  all VarCurr (-v1554(VarCurr)<->v1555(VarCurr)).
% 78.29/77.62  all VarCurr (v1555(VarCurr)<->v1177(VarCurr)|v1190(VarCurr)).
% 78.29/77.62  all VarCurr (v1553(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.29/77.62  all VarCurr (-v1549(VarCurr)<->v1550(VarCurr)).
% 78.29/77.62  all VarCurr (v1550(VarCurr)<->v1551(VarCurr)|v1174(VarCurr)).
% 78.29/77.62  all VarCurr (v1551(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.62  all VarCurr (v1533(VarCurr)<->v1534(VarCurr)|v1542(VarCurr)).
% 78.29/77.62  all VarCurr (v1542(VarCurr)<->v1543(VarCurr)&v1545(VarCurr)).
% 78.29/77.62  all VarCurr (v1545(VarCurr)<->v1546(VarCurr)&v1547(VarCurr)).
% 78.29/77.62  all VarCurr (-v1547(VarCurr)<->v1177(VarCurr)).
% 78.29/77.62  all VarCurr (v1546(VarCurr)<->v24(VarCurr)&v1174(VarCurr)).
% 78.29/77.62  all VarCurr (-v1543(VarCurr)<->v1544(VarCurr)).
% 78.29/77.62  all VarCurr (v1544(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.62  all VarCurr (v1534(VarCurr)<->v1535(VarCurr)|v1536(VarCurr)).
% 78.29/77.62  all VarCurr (v1536(VarCurr)<->v1537(VarCurr)&v1538(VarCurr)).
% 78.29/77.62  all VarCurr (v1538(VarCurr)<->v1539(VarCurr)&v1541(VarCurr)).
% 78.29/77.62  all VarCurr (-v1541(VarCurr)<->v1177(VarCurr)).
% 78.29/77.62  all VarCurr (v1539(VarCurr)<->v1540(VarCurr)&v1178(VarCurr)).
% 78.29/77.62  all VarCurr (-v1540(VarCurr)<->v395(VarCurr)).
% 78.29/77.62  all VarCurr (-v1537(VarCurr)<->v1158(VarCurr)).
% 78.29/77.62  all VarCurr (v1535(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.29/77.62  all VarCurr (v1139(VarCurr,bitIndex12)<->v1140(VarCurr,bitIndex12)).
% 78.29/77.62  all VarCurr (-v1529(VarCurr)-> (v1143(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1529(VarCurr)-> (v1143(VarCurr,bitIndex2)<->v1525(VarCurr,bitIndex2))).
% 78.29/77.62  all VarCurr (-v1529(VarCurr)<->v1346(VarCurr)).
% 78.29/77.62  all VarCurr (-v1527(VarCurr)-> (v1525(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1527(VarCurr)-> (v1525(VarCurr,bitIndex2)<->$T)).
% 78.29/77.62  all VarCurr (-v1527(VarCurr)<->v395(VarCurr)).
% 78.29/77.62  all VarCurr (v1124(VarCurr,bitIndex12)<->v1125(VarCurr,bitIndex12)).
% 78.29/77.62  all VarCurr (-v1522(VarCurr)-> (v1128(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1522(VarCurr)-> (v1128(VarCurr,bitIndex2)<->v1518(VarCurr,bitIndex2))).
% 78.29/77.62  all VarCurr (-v1522(VarCurr)<->v1346(VarCurr)).
% 78.29/77.62  all VarCurr (-v1520(VarCurr)-> (v1518(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1520(VarCurr)-> (v1518(VarCurr,bitIndex2)<->$T)).
% 78.29/77.62  all VarCurr (-v1520(VarCurr)<->v395(VarCurr)).
% 78.29/77.62  all VarCurr (v1109(VarCurr,bitIndex12)<->v1110(VarCurr,bitIndex12)).
% 78.29/77.62  all VarCurr (-v1515(VarCurr)-> (v1113(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1515(VarCurr)-> (v1113(VarCurr,bitIndex2)<->v1511(VarCurr,bitIndex2))).
% 78.29/77.62  all VarCurr (-v1515(VarCurr)<->v1346(VarCurr)).
% 78.29/77.62  all VarCurr (-v1513(VarCurr)-> (v1511(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1513(VarCurr)-> (v1511(VarCurr,bitIndex2)<->$T)).
% 78.29/77.62  all VarCurr (-v1513(VarCurr)<->v395(VarCurr)).
% 78.29/77.62  all VarCurr (v1094(VarCurr,bitIndex12)<->v1095(VarCurr,bitIndex12)).
% 78.29/77.62  all VarCurr (-v1508(VarCurr)-> (v1098(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1508(VarCurr)-> (v1098(VarCurr,bitIndex2)<->v1504(VarCurr,bitIndex2))).
% 78.29/77.62  all VarCurr (-v1508(VarCurr)<->v1346(VarCurr)).
% 78.29/77.62  all VarCurr (-v1506(VarCurr)-> (v1504(VarCurr,bitIndex2)<->$F)).
% 78.29/77.62  all VarCurr (v1506(VarCurr)-> (v1504(VarCurr,bitIndex2)<->$T)).
% 78.29/77.62  all VarCurr (-v1506(VarCurr)<->v395(VarCurr)).
% 78.29/77.62  all VarNext (v384(VarNext,bitIndex5)<->v1495(VarNext,bitIndex4)).
% 78.29/77.62  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1496(VarNext)-> (v1495(VarNext,bitIndex6)<->v384(VarCurr,bitIndex7))& (v1495(VarNext,bitIndex5)<->v384(VarCurr,bitIndex6))& (v1495(VarNext,bitIndex4)<->v384(VarCurr,bitIndex5))& (v1495(VarNext,bitIndex3)<->v384(VarCurr,bitIndex4))& (v1495(VarNext,bitIndex2)<->v384(VarCurr,bitIndex3))& (v1495(VarNext,bitIndex1)<->v384(VarCurr,bitIndex2))& (v1495(VarNext,bitIndex0)<->v384(VarCurr,bitIndex1)))).
% 78.29/77.62  all VarNext (v1496(VarNext)-> (all B (range_6_0(B)-> (v1495(VarNext,B)<->v1216(VarNext,B))))).
% 78.29/77.62  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1496(VarNext)<->v1497(VarNext))).
% 78.29/77.62  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1497(VarNext)<->v1499(VarNext)&v355(VarNext))).
% 78.29/77.62  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1499(VarNext)<->v362(VarNext))).
% 78.29/77.62  all VarCurr (-v1440(VarCurr)-> (v386(VarCurr,bitIndex5)<->$F)).
% 78.29/77.62  all VarCurr (v1440(VarCurr)-> (v386(VarCurr,bitIndex5)<->v1492(VarCurr))).
% 78.29/77.62  all VarCurr (-v1445(VarCurr)& -v1446(VarCurr)& -v1458(VarCurr)& -v1466(VarCurr)& -v1473(VarCurr)&v1481(VarCurr)-> (v1492(VarCurr)<->$T)).
% 78.29/77.62  all VarCurr (-v1445(VarCurr)& -v1446(VarCurr)& -v1458(VarCurr)& -v1466(VarCurr)&v1473(VarCurr)-> (v1492(VarCurr)<->v1139(VarCurr,bitIndex15))).
% 78.29/77.62  all VarCurr (-v1445(VarCurr)& -v1446(VarCurr)& -v1458(VarCurr)&v1466(VarCurr)-> (v1492(VarCurr)<->v1124(VarCurr,bitIndex15))).
% 78.29/77.62  all VarCurr (-v1445(VarCurr)& -v1446(VarCurr)&v1458(VarCurr)-> (v1492(VarCurr)<->$T)).
% 78.29/77.62  all VarCurr (-v1445(VarCurr)&v1446(VarCurr)-> (v1492(VarCurr)<->v1493(VarCurr))).
% 78.29/77.62  all VarCurr (v1445(VarCurr)-> (v1492(VarCurr)<->v1094(VarCurr,bitIndex15))).
% 78.29/77.62  all VarCurr (-v24(VarCurr)-> (v1493(VarCurr)<->$T)).
% 78.29/77.62  all VarCurr (v24(VarCurr)-> (v1493(VarCurr)<->v1109(VarCurr,bitIndex15))).
% 78.29/77.62  all VarCurr (v1440(VarCurr)<->v1441(VarCurr)|v1481(VarCurr)).
% 78.29/77.62  all VarCurr (v1481(VarCurr)<->v1482(VarCurr)&v1487(VarCurr)).
% 78.29/77.62  all VarCurr (v1487(VarCurr)<->v1488(VarCurr)&v1491(VarCurr)).
% 78.29/77.62  all VarCurr (-v1491(VarCurr)<->v1457(VarCurr)).
% 78.29/77.62  all VarCurr (v1488(VarCurr)<->v1489(VarCurr)&v1266(VarCurr)).
% 78.29/77.62  all VarCurr (v1489(VarCurr)<->v446(VarCurr)&v1490(VarCurr)).
% 78.29/77.62  all VarCurr (-v1490(VarCurr)<->v1088(VarCurr)).
% 78.29/77.62  all VarCurr (-v1482(VarCurr)<->v1483(VarCurr)).
% 78.29/77.62  all VarCurr (v1483(VarCurr)<->v1484(VarCurr)|v1200(VarCurr)).
% 78.29/77.62  all VarCurr (v1484(VarCurr)<->v1485(VarCurr)|v1187(VarCurr)).
% 78.29/77.62  all VarCurr (v1485(VarCurr)<->v1486(VarCurr)|v1190(VarCurr)).
% 78.29/77.62  all VarCurr (v1486(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.62  all VarCurr (v1441(VarCurr)<->v1442(VarCurr)|v1473(VarCurr)).
% 78.29/77.62  all VarCurr (v1473(VarCurr)<->v1474(VarCurr)&v1478(VarCurr)).
% 78.29/77.62  all VarCurr (v1478(VarCurr)<->v1479(VarCurr)&v1480(VarCurr)).
% 78.29/77.62  all VarCurr (-v1480(VarCurr)<->v1457(VarCurr)).
% 78.29/77.62  all VarCurr (v1479(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.29/77.62  all VarCurr (-v1474(VarCurr)<->v1475(VarCurr)).
% 78.29/77.62  all VarCurr (v1475(VarCurr)<->v1476(VarCurr)|v1187(VarCurr)).
% 78.29/77.62  all VarCurr (v1476(VarCurr)<->v1477(VarCurr)|v1190(VarCurr)).
% 78.29/77.62  all VarCurr (v1477(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.62  all VarCurr (v1442(VarCurr)<->v1443(VarCurr)|v1466(VarCurr)).
% 78.29/77.62  all VarCurr (v1466(VarCurr)<->v1467(VarCurr)&v1470(VarCurr)).
% 78.29/77.62  all VarCurr (v1470(VarCurr)<->v1471(VarCurr)&v1472(VarCurr)).
% 78.29/77.62  all VarCurr (-v1472(VarCurr)<->v1457(VarCurr)).
% 78.29/77.62  all VarCurr (v1471(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.29/77.62  all VarCurr (-v1467(VarCurr)<->v1468(VarCurr)).
% 78.29/77.62  all VarCurr (v1468(VarCurr)<->v1469(VarCurr)|v1190(VarCurr)).
% 78.29/77.62  all VarCurr (v1469(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.62  all VarCurr (v1443(VarCurr)<->v1444(VarCurr)|v1458(VarCurr)).
% 78.29/77.62  all VarCurr (v1458(VarCurr)<->v1459(VarCurr)&v1461(VarCurr)).
% 78.29/77.62  all VarCurr (v1461(VarCurr)<->v1462(VarCurr)&v1465(VarCurr)).
% 78.29/77.62  all VarCurr (-v1465(VarCurr)<->v1457(VarCurr)).
% 78.29/77.62  all VarCurr (v1462(VarCurr)<->v1463(VarCurr)&v1190(VarCurr)).
% 78.29/77.62  all VarCurr (v1463(VarCurr)<->v446(VarCurr)&v1464(VarCurr)).
% 78.29/77.62  all VarCurr (-v1464(VarCurr)<->v1241(VarCurr)).
% 78.29/77.62  all VarCurr (-v1459(VarCurr)<->v1460(VarCurr)).
% 78.29/77.62  all VarCurr (v1460(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.62  all VarCurr (v1444(VarCurr)<->v1445(VarCurr)|v1446(VarCurr)).
% 78.29/77.62  all VarCurr (v1446(VarCurr)<->v1447(VarCurr)&v1448(VarCurr)).
% 78.29/77.62  all VarCurr (v1448(VarCurr)<->v1449(VarCurr)&v1456(VarCurr)).
% 78.29/77.62  all VarCurr (-v1456(VarCurr)<->v1457(VarCurr)).
% 78.29/77.62  all VarCurr (v1457(VarCurr)<->v1177(VarCurr)|v1178(VarCurr)).
% 78.29/77.62  all VarCurr (v1449(VarCurr)<->v1450(VarCurr)&v1174(VarCurr)).
% 78.29/77.62  all VarCurr (v1450(VarCurr)<->v24(VarCurr)|v1451(VarCurr)).
% 78.29/77.62  all VarCurr (v1451(VarCurr)<->v1452(VarCurr)&v1455(VarCurr)).
% 78.29/77.62  all VarCurr (-v1455(VarCurr)<->v24(VarCurr)).
% 78.29/77.62  all VarCurr (v1452(VarCurr)<->v1453(VarCurr)&v1454(VarCurr)).
% 78.29/77.62  all VarCurr (-v1454(VarCurr)<->v1171(VarCurr)).
% 78.29/77.62  all VarCurr (v1453(VarCurr)<->v446(VarCurr)&v452(VarCurr)).
% 78.29/77.62  all VarCurr (-v1447(VarCurr)<->v1158(VarCurr)).
% 78.29/77.62  all VarCurr (v1445(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.29/77.62  all VarCurr (v1139(VarCurr,bitIndex15)<->v1140(VarCurr,bitIndex15)).
% 78.29/77.62  all VarCurr (v1124(VarCurr,bitIndex15)<->v1125(VarCurr,bitIndex15)).
% 78.29/77.62  all VarCurr (v1109(VarCurr,bitIndex15)<->v1110(VarCurr,bitIndex15)).
% 78.29/77.62  all VarCurr (v1094(VarCurr,bitIndex15)<->v1095(VarCurr,bitIndex15)).
% 78.29/77.62  all VarCurr (v446(VarCurr)<->v1433(VarCurr)&v448(VarCurr,bitIndex0)).
% 78.29/77.62  all VarCurr (-v1433(VarCurr)<->v1434(VarCurr)).
% 78.29/77.62  all VarCurr (v1434(VarCurr)<->v1436(VarCurr)|v448(VarCurr,bitIndex5)).
% 78.29/77.62  all VarCurr (v1436(VarCurr)<->v1437(VarCurr)|v448(VarCurr,bitIndex4)).
% 78.29/77.62  all VarCurr (v1437(VarCurr)<->v1438(VarCurr)|v448(VarCurr,bitIndex3)).
% 78.29/77.62  all VarCurr (v1438(VarCurr)<->v448(VarCurr,bitIndex1)|v448(VarCurr,bitIndex2)).
% 78.29/77.62  all VarCurr (v454(VarCurr,bitIndex5)<->v635(VarCurr,bitIndex5)).
% 78.29/77.62  all VarCurr (v458(VarCurr,bitIndex7)<->v603(VarCurr,bitIndex5)).
% 78.29/77.63  all VarCurr (v454(VarCurr,bitIndex4)<->v635(VarCurr,bitIndex4)).
% 78.29/77.63  all VarCurr (v458(VarCurr,bitIndex6)<->v603(VarCurr,bitIndex4)).
% 78.29/77.63  all VarCurr (v454(VarCurr,bitIndex3)<->v635(VarCurr,bitIndex3)).
% 78.29/77.63  all VarCurr (v458(VarCurr,bitIndex5)<->v603(VarCurr,bitIndex3)).
% 78.29/77.63  all VarCurr (v454(VarCurr,bitIndex2)<->v635(VarCurr,bitIndex2)).
% 78.29/77.63  all VarCurr (v458(VarCurr,bitIndex4)<->v603(VarCurr,bitIndex2)).
% 78.29/77.63  all VarCurr (v454(VarCurr,bitIndex1)<->v635(VarCurr,bitIndex1)).
% 78.29/77.63  all VarCurr (v458(VarCurr,bitIndex3)<->v603(VarCurr,bitIndex1)).
% 78.29/77.63  all VarCurr (-v1371(VarCurr)-> (v450(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v1371(VarCurr)-> (v450(VarCurr)<->v1430(VarCurr))).
% 78.29/77.63  all VarCurr (-v1377(VarCurr)& -v1378(VarCurr)& -v1383(VarCurr)& -v1394(VarCurr)& -v1402(VarCurr)& -v1410(VarCurr)&v1419(VarCurr)-> (v1430(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (-v1377(VarCurr)& -v1378(VarCurr)& -v1383(VarCurr)& -v1394(VarCurr)& -v1402(VarCurr)&v1410(VarCurr)-> (v1430(VarCurr)<->v1139(VarCurr,bitIndex1))).
% 78.29/77.63  all VarCurr (-v1377(VarCurr)& -v1378(VarCurr)& -v1383(VarCurr)& -v1394(VarCurr)&v1402(VarCurr)-> (v1430(VarCurr)<->v1124(VarCurr,bitIndex1))).
% 78.29/77.63  all VarCurr (-v1377(VarCurr)& -v1378(VarCurr)& -v1383(VarCurr)&v1394(VarCurr)-> (v1430(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (-v1377(VarCurr)& -v1378(VarCurr)&v1383(VarCurr)-> (v1430(VarCurr)<->v1431(VarCurr))).
% 78.29/77.63  all VarCurr (-v1377(VarCurr)&v1378(VarCurr)-> (v1430(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v1377(VarCurr)-> (v1430(VarCurr)<->v1094(VarCurr,bitIndex1))).
% 78.29/77.63  all VarCurr (-v24(VarCurr)-> (v1431(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v24(VarCurr)-> (v1431(VarCurr)<->v1109(VarCurr,bitIndex1))).
% 78.29/77.63  all VarCurr (v1371(VarCurr)<->v1372(VarCurr)|v1419(VarCurr)).
% 78.29/77.63  all VarCurr (v1419(VarCurr)<->v1420(VarCurr)&v1426(VarCurr)).
% 78.29/77.63  all VarCurr (v1426(VarCurr)<->v1427(VarCurr)&v1429(VarCurr)).
% 78.29/77.63  all VarCurr (-v1429(VarCurr)<->v1177(VarCurr)).
% 78.29/77.63  all VarCurr (v1427(VarCurr)<->v1428(VarCurr)&v1266(VarCurr)).
% 78.29/77.63  all VarCurr (-v1428(VarCurr)<->v1088(VarCurr)).
% 78.29/77.63  all VarCurr (-v1420(VarCurr)<->v1421(VarCurr)).
% 78.29/77.63  all VarCurr (v1421(VarCurr)<->v1422(VarCurr)|v1200(VarCurr)).
% 78.29/77.63  all VarCurr (v1422(VarCurr)<->v1423(VarCurr)|v1187(VarCurr)).
% 78.29/77.63  all VarCurr (v1423(VarCurr)<->v1424(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1424(VarCurr)<->v1425(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1425(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.63  all VarCurr (v1372(VarCurr)<->v1373(VarCurr)|v1410(VarCurr)).
% 78.29/77.63  all VarCurr (v1410(VarCurr)<->v1411(VarCurr)&v1416(VarCurr)).
% 78.29/77.63  all VarCurr (v1416(VarCurr)<->v1417(VarCurr)&v1418(VarCurr)).
% 78.29/77.63  all VarCurr (-v1418(VarCurr)<->v1177(VarCurr)).
% 78.29/77.63  all VarCurr (v1417(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.29/77.63  all VarCurr (-v1411(VarCurr)<->v1412(VarCurr)).
% 78.29/77.63  all VarCurr (v1412(VarCurr)<->v1413(VarCurr)|v1187(VarCurr)).
% 78.29/77.63  all VarCurr (v1413(VarCurr)<->v1414(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1414(VarCurr)<->v1415(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1415(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.63  all VarCurr (v1373(VarCurr)<->v1374(VarCurr)|v1402(VarCurr)).
% 78.29/77.63  all VarCurr (v1402(VarCurr)<->v1403(VarCurr)&v1407(VarCurr)).
% 78.29/77.63  all VarCurr (v1407(VarCurr)<->v1408(VarCurr)&v1409(VarCurr)).
% 78.29/77.63  all VarCurr (-v1409(VarCurr)<->v1177(VarCurr)).
% 78.29/77.63  all VarCurr (v1408(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.29/77.63  all VarCurr (-v1403(VarCurr)<->v1404(VarCurr)).
% 78.29/77.63  all VarCurr (v1404(VarCurr)<->v1405(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1405(VarCurr)<->v1406(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1406(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.63  all VarCurr (v1374(VarCurr)<->v1375(VarCurr)|v1394(VarCurr)).
% 78.29/77.63  all VarCurr (v1394(VarCurr)<->v1395(VarCurr)&v1398(VarCurr)).
% 78.29/77.63  all VarCurr (v1398(VarCurr)<->v1399(VarCurr)&v1401(VarCurr)).
% 78.29/77.63  all VarCurr (-v1401(VarCurr)<->v1177(VarCurr)).
% 78.29/77.63  all VarCurr (v1399(VarCurr)<->v1400(VarCurr)&v1190(VarCurr)).
% 78.29/77.63  all VarCurr (-v1400(VarCurr)<->v1241(VarCurr)).
% 78.29/77.63  all VarCurr (-v1395(VarCurr)<->v1396(VarCurr)).
% 78.29/77.63  all VarCurr (v1396(VarCurr)<->v1397(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1397(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.63  all VarCurr (v1375(VarCurr)<->v1376(VarCurr)|v1383(VarCurr)).
% 78.29/77.63  all VarCurr (v1383(VarCurr)<->v1384(VarCurr)&v1386(VarCurr)).
% 78.29/77.63  all VarCurr (v1386(VarCurr)<->v1387(VarCurr)&v1393(VarCurr)).
% 78.29/77.63  all VarCurr (-v1393(VarCurr)<->v1177(VarCurr)).
% 78.29/77.63  all VarCurr (v1387(VarCurr)<->v1388(VarCurr)&v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1388(VarCurr)<->v24(VarCurr)|v1389(VarCurr)).
% 78.29/77.63  all VarCurr (v1389(VarCurr)<->v1390(VarCurr)&v1392(VarCurr)).
% 78.29/77.63  all VarCurr (-v1392(VarCurr)<->v24(VarCurr)).
% 78.29/77.63  all VarCurr (v1390(VarCurr)<->v452(VarCurr)&v1391(VarCurr)).
% 78.29/77.63  all VarCurr (-v1391(VarCurr)<->v1171(VarCurr)).
% 78.29/77.63  all VarCurr (-v1384(VarCurr)<->v1385(VarCurr)).
% 78.29/77.63  all VarCurr (v1385(VarCurr)<->v1158(VarCurr)|v1178(VarCurr)).
% 78.29/77.63  all VarCurr (v1376(VarCurr)<->v1377(VarCurr)|v1378(VarCurr)).
% 78.29/77.63  all VarCurr (v1378(VarCurr)<->v1379(VarCurr)&v1380(VarCurr)).
% 78.29/77.63  all VarCurr (v1380(VarCurr)<->v1381(VarCurr)&v1382(VarCurr)).
% 78.29/77.63  all VarCurr (-v1382(VarCurr)<->v1177(VarCurr)).
% 78.29/77.63  all VarCurr (v1381(VarCurr)<->v395(VarCurr)&v1178(VarCurr)).
% 78.29/77.63  all VarCurr (-v1379(VarCurr)<->v1158(VarCurr)).
% 78.29/77.63  all VarCurr (v1377(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.29/77.63  all VarCurr (v1139(VarCurr,bitIndex1)<->v1140(VarCurr,bitIndex1)).
% 78.29/77.63  all VarCurr (-v1369(VarCurr)-> (v1151(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v1369(VarCurr)-> (v1151(VarCurr)<->v1366(VarCurr))).
% 78.29/77.63  all VarCurr (-v1369(VarCurr)<->v1346(VarCurr)).
% 78.29/77.63  all VarCurr (-v395(VarCurr)-> (v1366(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v395(VarCurr)-> (v1366(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v1124(VarCurr,bitIndex1)<->v1125(VarCurr,bitIndex1)).
% 78.29/77.63  all VarCurr (-v1363(VarCurr)-> (v1136(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v1363(VarCurr)-> (v1136(VarCurr)<->v1360(VarCurr))).
% 78.29/77.63  all VarCurr (-v1363(VarCurr)<->v1346(VarCurr)).
% 78.29/77.63  all VarCurr (-v395(VarCurr)-> (v1360(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v395(VarCurr)-> (v1360(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v1109(VarCurr,bitIndex1)<->v1110(VarCurr,bitIndex1)).
% 78.29/77.63  all VarCurr (-v1357(VarCurr)-> (v1121(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v1357(VarCurr)-> (v1121(VarCurr)<->v1354(VarCurr))).
% 78.29/77.63  all VarCurr (-v1357(VarCurr)<->v1346(VarCurr)).
% 78.29/77.63  all VarCurr (-v395(VarCurr)-> (v1354(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v395(VarCurr)-> (v1354(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v1094(VarCurr,bitIndex1)<->v1095(VarCurr,bitIndex1)).
% 78.29/77.63  all VarCurr (-v1351(VarCurr)-> (v1106(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v1351(VarCurr)-> (v1106(VarCurr)<->v1348(VarCurr))).
% 78.29/77.63  all VarCurr (-v1351(VarCurr)<->v1346(VarCurr)).
% 78.29/77.63  all VarCurr (-v395(VarCurr)-> (v1348(VarCurr)<->$F)).
% 78.29/77.63  all VarCurr (v395(VarCurr)-> (v1348(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v1346(VarCurr)<->v34(VarCurr)).
% 78.29/77.63  all VarNext (v384(VarNext,bitIndex4)<->v1337(VarNext,bitIndex3)).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1338(VarNext)-> (v1337(VarNext,bitIndex6)<->v384(VarCurr,bitIndex7))& (v1337(VarNext,bitIndex5)<->v384(VarCurr,bitIndex6))& (v1337(VarNext,bitIndex4)<->v384(VarCurr,bitIndex5))& (v1337(VarNext,bitIndex3)<->v384(VarCurr,bitIndex4))& (v1337(VarNext,bitIndex2)<->v384(VarCurr,bitIndex3))& (v1337(VarNext,bitIndex1)<->v384(VarCurr,bitIndex2))& (v1337(VarNext,bitIndex0)<->v384(VarCurr,bitIndex1)))).
% 78.29/77.63  all VarNext (v1338(VarNext)-> (all B (range_6_0(B)-> (v1337(VarNext,B)<->v1216(VarNext,B))))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1338(VarNext)<->v1339(VarNext))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1339(VarNext)<->v1341(VarNext)&v355(VarNext))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1341(VarNext)<->v362(VarNext))).
% 78.29/77.63  all VarCurr (-v1279(VarCurr)-> (v386(VarCurr,bitIndex4)<->$F)).
% 78.29/77.63  all VarCurr (v1279(VarCurr)-> (v386(VarCurr,bitIndex4)<->v1334(VarCurr))).
% 78.29/77.63  all VarCurr (-v1284(VarCurr)& -v1285(VarCurr)& -v1298(VarCurr)& -v1307(VarCurr)& -v1314(VarCurr)&v1322(VarCurr)-> (v1334(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (-v1284(VarCurr)& -v1285(VarCurr)& -v1298(VarCurr)& -v1307(VarCurr)&v1314(VarCurr)-> (v1334(VarCurr)<->v1139(VarCurr,bitIndex14))).
% 78.29/77.63  all VarCurr (-v1284(VarCurr)& -v1285(VarCurr)& -v1298(VarCurr)&v1307(VarCurr)-> (v1334(VarCurr)<->v1124(VarCurr,bitIndex14))).
% 78.29/77.63  all VarCurr (-v1284(VarCurr)& -v1285(VarCurr)&v1298(VarCurr)-> (v1334(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (-v1284(VarCurr)&v1285(VarCurr)-> (v1334(VarCurr)<->v1335(VarCurr))).
% 78.29/77.63  all VarCurr (v1284(VarCurr)-> (v1334(VarCurr)<->v1094(VarCurr,bitIndex14))).
% 78.29/77.63  all VarCurr (-v24(VarCurr)-> (v1335(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v24(VarCurr)-> (v1335(VarCurr)<->v1109(VarCurr,bitIndex14))).
% 78.29/77.63  all VarCurr (v1279(VarCurr)<->v1280(VarCurr)|v1322(VarCurr)).
% 78.29/77.63  all VarCurr (v1322(VarCurr)<->v1323(VarCurr)&v1328(VarCurr)).
% 78.29/77.63  all VarCurr (v1328(VarCurr)<->v1329(VarCurr)&v1333(VarCurr)).
% 78.29/77.63  all VarCurr (-v1333(VarCurr)<->v1297(VarCurr)).
% 78.29/77.63  all VarCurr (v1329(VarCurr)<->v1330(VarCurr)&v1266(VarCurr)).
% 78.29/77.63  all VarCurr (v1330(VarCurr)<->v1331(VarCurr)&v1332(VarCurr)).
% 78.29/77.63  all VarCurr (-v1332(VarCurr)<->v1088(VarCurr)).
% 78.29/77.63  all VarCurr (-v1331(VarCurr)<->v446(VarCurr)).
% 78.29/77.63  all VarCurr (-v1323(VarCurr)<->v1324(VarCurr)).
% 78.29/77.63  all VarCurr (v1324(VarCurr)<->v1325(VarCurr)|v1200(VarCurr)).
% 78.29/77.63  all VarCurr (v1325(VarCurr)<->v1326(VarCurr)|v1187(VarCurr)).
% 78.29/77.63  all VarCurr (v1326(VarCurr)<->v1327(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1327(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1280(VarCurr)<->v1281(VarCurr)|v1314(VarCurr)).
% 78.29/77.63  all VarCurr (v1314(VarCurr)<->v1315(VarCurr)&v1319(VarCurr)).
% 78.29/77.63  all VarCurr (v1319(VarCurr)<->v1320(VarCurr)&v1321(VarCurr)).
% 78.29/77.63  all VarCurr (-v1321(VarCurr)<->v1297(VarCurr)).
% 78.29/77.63  all VarCurr (v1320(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.29/77.63  all VarCurr (-v1315(VarCurr)<->v1316(VarCurr)).
% 78.29/77.63  all VarCurr (v1316(VarCurr)<->v1317(VarCurr)|v1187(VarCurr)).
% 78.29/77.63  all VarCurr (v1317(VarCurr)<->v1318(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1318(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1281(VarCurr)<->v1282(VarCurr)|v1307(VarCurr)).
% 78.29/77.63  all VarCurr (v1307(VarCurr)<->v1308(VarCurr)&v1311(VarCurr)).
% 78.29/77.63  all VarCurr (v1311(VarCurr)<->v1312(VarCurr)&v1313(VarCurr)).
% 78.29/77.63  all VarCurr (-v1313(VarCurr)<->v1297(VarCurr)).
% 78.29/77.63  all VarCurr (v1312(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.29/77.63  all VarCurr (-v1308(VarCurr)<->v1309(VarCurr)).
% 78.29/77.63  all VarCurr (v1309(VarCurr)<->v1310(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1310(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1282(VarCurr)<->v1283(VarCurr)|v1298(VarCurr)).
% 78.29/77.63  all VarCurr (v1298(VarCurr)<->v1299(VarCurr)&v1301(VarCurr)).
% 78.29/77.63  all VarCurr (v1301(VarCurr)<->v1302(VarCurr)&v1306(VarCurr)).
% 78.29/77.63  all VarCurr (-v1306(VarCurr)<->v1297(VarCurr)).
% 78.29/77.63  all VarCurr (v1302(VarCurr)<->v1303(VarCurr)&v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1303(VarCurr)<->v1304(VarCurr)&v1305(VarCurr)).
% 78.29/77.63  all VarCurr (-v1305(VarCurr)<->v1241(VarCurr)).
% 78.29/77.63  all VarCurr (-v1304(VarCurr)<->v446(VarCurr)).
% 78.29/77.63  all VarCurr (-v1299(VarCurr)<->v1300(VarCurr)).
% 78.29/77.63  all VarCurr (v1300(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1283(VarCurr)<->v1284(VarCurr)|v1285(VarCurr)).
% 78.29/77.63  all VarCurr (v1285(VarCurr)<->v1286(VarCurr)&v1287(VarCurr)).
% 78.29/77.63  all VarCurr (v1287(VarCurr)<->v1288(VarCurr)&v1296(VarCurr)).
% 78.29/77.63  all VarCurr (-v1296(VarCurr)<->v1297(VarCurr)).
% 78.29/77.63  all VarCurr (v1297(VarCurr)<->v1177(VarCurr)|v1178(VarCurr)).
% 78.29/77.63  all VarCurr (v1288(VarCurr)<->v1289(VarCurr)&v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1289(VarCurr)<->v24(VarCurr)|v1290(VarCurr)).
% 78.29/77.63  all VarCurr (v1290(VarCurr)<->v1291(VarCurr)&v1295(VarCurr)).
% 78.29/77.63  all VarCurr (-v1295(VarCurr)<->v24(VarCurr)).
% 78.29/77.63  all VarCurr (v1291(VarCurr)<->v1292(VarCurr)&v1294(VarCurr)).
% 78.29/77.63  all VarCurr (-v1294(VarCurr)<->v1171(VarCurr)).
% 78.29/77.63  all VarCurr (v1292(VarCurr)<->v1293(VarCurr)&v452(VarCurr)).
% 78.29/77.63  all VarCurr (-v1293(VarCurr)<->v446(VarCurr)).
% 78.29/77.63  all VarCurr (-v1286(VarCurr)<->v1158(VarCurr)).
% 78.29/77.63  all VarCurr (v1284(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.29/77.63  all VarCurr (v1139(VarCurr,bitIndex14)<->v1140(VarCurr,bitIndex14)).
% 78.29/77.63  all VarCurr (v1124(VarCurr,bitIndex14)<->v1125(VarCurr,bitIndex14)).
% 78.29/77.63  all VarCurr (v1109(VarCurr,bitIndex14)<->v1110(VarCurr,bitIndex14)).
% 78.29/77.63  all VarCurr (v1094(VarCurr,bitIndex14)<->v1095(VarCurr,bitIndex14)).
% 78.29/77.63  all VarNext (v384(VarNext,bitIndex7)<->v1271(VarNext,bitIndex6)).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1272(VarNext)-> (v1271(VarNext,bitIndex6)<->v384(VarCurr,bitIndex7))& (v1271(VarNext,bitIndex5)<->v384(VarCurr,bitIndex6))& (v1271(VarNext,bitIndex4)<->v384(VarCurr,bitIndex5))& (v1271(VarNext,bitIndex3)<->v384(VarCurr,bitIndex4))& (v1271(VarNext,bitIndex2)<->v384(VarCurr,bitIndex3))& (v1271(VarNext,bitIndex1)<->v384(VarCurr,bitIndex2))& (v1271(VarNext,bitIndex0)<->v384(VarCurr,bitIndex1)))).
% 78.29/77.63  all VarNext (v1272(VarNext)-> (all B (range_6_0(B)-> (v1271(VarNext,B)<->v1216(VarNext,B))))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1272(VarNext)<->v1273(VarNext))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1273(VarNext)<->v1275(VarNext)&v355(VarNext))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1275(VarNext)<->v362(VarNext))).
% 78.29/77.63  all VarCurr (-v1221(VarCurr)-> (v386(VarCurr,bitIndex7)<->$F)).
% 78.29/77.63  all VarCurr (v1221(VarCurr)-> (v386(VarCurr,bitIndex7)<->v1268(VarCurr))).
% 78.29/77.63  all VarCurr (-v1226(VarCurr)& -v1227(VarCurr)& -v1236(VarCurr)& -v1243(VarCurr)& -v1250(VarCurr)&v1258(VarCurr)-> (v1268(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (-v1226(VarCurr)& -v1227(VarCurr)& -v1236(VarCurr)& -v1243(VarCurr)&v1250(VarCurr)-> (v1268(VarCurr)<->v1139(VarCurr,bitIndex17))).
% 78.29/77.63  all VarCurr (-v1226(VarCurr)& -v1227(VarCurr)& -v1236(VarCurr)&v1243(VarCurr)-> (v1268(VarCurr)<->v1124(VarCurr,bitIndex17))).
% 78.29/77.63  all VarCurr (-v1226(VarCurr)& -v1227(VarCurr)&v1236(VarCurr)-> (v1268(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (-v1226(VarCurr)&v1227(VarCurr)-> (v1268(VarCurr)<->v1269(VarCurr))).
% 78.29/77.63  all VarCurr (v1226(VarCurr)-> (v1268(VarCurr)<->v1094(VarCurr,bitIndex17))).
% 78.29/77.63  all VarCurr (-v24(VarCurr)-> (v1269(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v24(VarCurr)-> (v1269(VarCurr)<->v1109(VarCurr,bitIndex17))).
% 78.29/77.63  all VarCurr (v1221(VarCurr)<->v1222(VarCurr)|v1258(VarCurr)).
% 78.29/77.63  all VarCurr (v1258(VarCurr)<->v1259(VarCurr)&v1264(VarCurr)).
% 78.29/77.63  all VarCurr (v1264(VarCurr)<->v1265(VarCurr)&v1267(VarCurr)).
% 78.29/77.63  all VarCurr (-v1267(VarCurr)<->v1235(VarCurr)).
% 78.29/77.63  all VarCurr (v1265(VarCurr)<->v1088(VarCurr)&v1266(VarCurr)).
% 78.29/77.63  all VarCurr (v1266(VarCurr)<-> ($T<->v384(VarCurr,bitIndex7))).
% 78.29/77.63  all VarCurr (-v1259(VarCurr)<->v1260(VarCurr)).
% 78.29/77.63  all VarCurr (v1260(VarCurr)<->v1261(VarCurr)|v1200(VarCurr)).
% 78.29/77.63  all VarCurr (v1261(VarCurr)<->v1262(VarCurr)|v1187(VarCurr)).
% 78.29/77.63  all VarCurr (v1262(VarCurr)<->v1263(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1263(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1222(VarCurr)<->v1223(VarCurr)|v1250(VarCurr)).
% 78.29/77.63  all VarCurr (v1250(VarCurr)<->v1251(VarCurr)&v1255(VarCurr)).
% 78.29/77.63  all VarCurr (v1255(VarCurr)<->v1256(VarCurr)&v1257(VarCurr)).
% 78.29/77.63  all VarCurr (-v1257(VarCurr)<->v1235(VarCurr)).
% 78.29/77.63  all VarCurr (v1256(VarCurr)<->v24(VarCurr)&v1200(VarCurr)).
% 78.29/77.63  all VarCurr (-v1251(VarCurr)<->v1252(VarCurr)).
% 78.29/77.63  all VarCurr (v1252(VarCurr)<->v1253(VarCurr)|v1187(VarCurr)).
% 78.29/77.63  all VarCurr (v1253(VarCurr)<->v1254(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1254(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1223(VarCurr)<->v1224(VarCurr)|v1243(VarCurr)).
% 78.29/77.63  all VarCurr (v1243(VarCurr)<->v1244(VarCurr)&v1247(VarCurr)).
% 78.29/77.63  all VarCurr (v1247(VarCurr)<->v1248(VarCurr)&v1249(VarCurr)).
% 78.29/77.63  all VarCurr (-v1249(VarCurr)<->v1235(VarCurr)).
% 78.29/77.63  all VarCurr (v1248(VarCurr)<->v24(VarCurr)&v1187(VarCurr)).
% 78.29/77.63  all VarCurr (-v1244(VarCurr)<->v1245(VarCurr)).
% 78.29/77.63  all VarCurr (v1245(VarCurr)<->v1246(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1246(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1224(VarCurr)<->v1225(VarCurr)|v1236(VarCurr)).
% 78.29/77.63  all VarCurr (v1236(VarCurr)<->v1237(VarCurr)&v1239(VarCurr)).
% 78.29/77.63  all VarCurr (v1239(VarCurr)<->v1240(VarCurr)&v1242(VarCurr)).
% 78.29/77.63  all VarCurr (-v1242(VarCurr)<->v1235(VarCurr)).
% 78.29/77.63  all VarCurr (v1240(VarCurr)<->v1241(VarCurr)&v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1241(VarCurr)<->v1083(VarCurr)&v1088(VarCurr)).
% 78.29/77.63  all VarCurr (-v1237(VarCurr)<->v1238(VarCurr)).
% 78.29/77.63  all VarCurr (v1238(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1225(VarCurr)<->v1226(VarCurr)|v1227(VarCurr)).
% 78.29/77.63  all VarCurr (v1227(VarCurr)<->v1228(VarCurr)&v1229(VarCurr)).
% 78.29/77.63  all VarCurr (v1229(VarCurr)<->v1230(VarCurr)&v1234(VarCurr)).
% 78.29/77.63  all VarCurr (-v1234(VarCurr)<->v1235(VarCurr)).
% 78.29/77.63  all VarCurr (v1235(VarCurr)<->v1177(VarCurr)|v1178(VarCurr)).
% 78.29/77.63  all VarCurr (v1230(VarCurr)<->v1231(VarCurr)&v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1231(VarCurr)<->v24(VarCurr)|v1232(VarCurr)).
% 78.29/77.63  all VarCurr (v1232(VarCurr)<->v1171(VarCurr)&v1233(VarCurr)).
% 78.29/77.63  all VarCurr (-v1233(VarCurr)<->v24(VarCurr)).
% 78.29/77.63  all VarCurr (-v1228(VarCurr)<->v1158(VarCurr)).
% 78.29/77.63  all VarCurr (v1226(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.29/77.63  all VarCurr (v1139(VarCurr,bitIndex17)<->v1140(VarCurr,bitIndex17)).
% 78.29/77.63  all VarCurr (v1124(VarCurr,bitIndex17)<->v1125(VarCurr,bitIndex17)).
% 78.29/77.63  all VarCurr (v1109(VarCurr,bitIndex17)<->v1110(VarCurr,bitIndex17)).
% 78.29/77.63  all VarCurr (v1094(VarCurr,bitIndex17)<->v1095(VarCurr,bitIndex17)).
% 78.29/77.63  all VarNext (v384(VarNext,bitIndex6)<->v1207(VarNext,bitIndex5)).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1208(VarNext)-> (v1207(VarNext,bitIndex6)<->v384(VarCurr,bitIndex7))& (v1207(VarNext,bitIndex5)<->v384(VarCurr,bitIndex6))& (v1207(VarNext,bitIndex4)<->v384(VarCurr,bitIndex5))& (v1207(VarNext,bitIndex3)<->v384(VarCurr,bitIndex4))& (v1207(VarNext,bitIndex2)<->v384(VarCurr,bitIndex3))& (v1207(VarNext,bitIndex1)<->v384(VarCurr,bitIndex2))& (v1207(VarNext,bitIndex0)<->v384(VarCurr,bitIndex1)))).
% 78.29/77.63  all VarNext (v1208(VarNext)-> (all B (range_6_0(B)-> (v1207(VarNext,B)<->v1216(VarNext,B))))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_6_0(B)-> (v1216(VarNext,B)<->v1214(VarCurr,B))))).
% 78.29/77.63  all VarCurr (-v1217(VarCurr)-> (v1214(VarCurr,bitIndex6)<->v386(VarCurr,bitIndex7))& (v1214(VarCurr,bitIndex5)<->v386(VarCurr,bitIndex6))& (v1214(VarCurr,bitIndex4)<->v386(VarCurr,bitIndex5))& (v1214(VarCurr,bitIndex3)<->v386(VarCurr,bitIndex4))& (v1214(VarCurr,bitIndex2)<->v386(VarCurr,bitIndex3))& (v1214(VarCurr,bitIndex1)<->v386(VarCurr,bitIndex2))& (v1214(VarCurr,bitIndex0)<->v386(VarCurr,bitIndex1))).
% 78.29/77.63  all VarCurr (v1217(VarCurr)-> (all B (range_6_0(B)-> (v1214(VarCurr,B)<->$F)))).
% 78.29/77.63  all B (range_6_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B).
% 78.29/77.63  all VarCurr (-v1217(VarCurr)<->v15(VarCurr)).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1208(VarNext)<->v1209(VarNext))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v1209(VarNext)<->v1210(VarNext)&v355(VarNext))).
% 78.29/77.63  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v1210(VarNext)<->v362(VarNext))).
% 78.29/77.63  all VarCurr (-v1154(VarCurr)-> (v386(VarCurr,bitIndex6)<->$F)).
% 78.29/77.63  all VarCurr (v1154(VarCurr)-> (v386(VarCurr,bitIndex6)<->v1202(VarCurr))).
% 78.29/77.63  all VarCurr (-v1157(VarCurr)& -v1159(VarCurr)& -v1179(VarCurr)&v1191(VarCurr)-> (v1202(VarCurr)<->v1205(VarCurr))).
% 78.29/77.63  all VarCurr (-v1157(VarCurr)& -v1159(VarCurr)&v1179(VarCurr)-> (v1202(VarCurr)<->v1204(VarCurr))).
% 78.29/77.63  all VarCurr (-v1157(VarCurr)&v1159(VarCurr)-> (v1202(VarCurr)<->v1203(VarCurr))).
% 78.29/77.63  all VarCurr (v1157(VarCurr)-> (v1202(VarCurr)<->v1094(VarCurr,bitIndex16))).
% 78.29/77.63  all VarCurr (-v24(VarCurr)-> (v1205(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v24(VarCurr)-> (v1205(VarCurr)<->v1139(VarCurr,bitIndex16))).
% 78.29/77.63  all VarCurr (-v24(VarCurr)-> (v1204(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v24(VarCurr)-> (v1204(VarCurr)<->v1124(VarCurr,bitIndex16))).
% 78.29/77.63  all VarCurr (-v24(VarCurr)-> (v1203(VarCurr)<->$T)).
% 78.29/77.63  all VarCurr (v24(VarCurr)-> (v1203(VarCurr)<->v1109(VarCurr,bitIndex16))).
% 78.29/77.63  all VarCurr (v1154(VarCurr)<->v1155(VarCurr)|v1191(VarCurr)).
% 78.29/77.63  all VarCurr (v1191(VarCurr)<->v1192(VarCurr)&v1195(VarCurr)).
% 78.29/77.63  all VarCurr (v1195(VarCurr)<->v1196(VarCurr)&v1201(VarCurr)).
% 78.29/77.63  all VarCurr (-v1201(VarCurr)<->v1189(VarCurr)).
% 78.29/77.63  all VarCurr (v1196(VarCurr)<->v1197(VarCurr)&v1200(VarCurr)).
% 78.29/77.63  all VarCurr (v1200(VarCurr)<-> ($T<->v384(VarCurr,bitIndex6))).
% 78.29/77.63  all VarCurr (v1197(VarCurr)<->v24(VarCurr)|v1198(VarCurr)).
% 78.29/77.63  all VarCurr (v1198(VarCurr)<->v768(VarCurr)&v1199(VarCurr)).
% 78.29/77.63  all VarCurr (-v1199(VarCurr)<->v24(VarCurr)).
% 78.29/77.63  all VarCurr (-v1192(VarCurr)<->v1193(VarCurr)).
% 78.29/77.63  all VarCurr (v1193(VarCurr)<->v1194(VarCurr)|v1187(VarCurr)).
% 78.29/77.63  all VarCurr (v1194(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.63  all VarCurr (v1155(VarCurr)<->v1156(VarCurr)|v1179(VarCurr)).
% 78.29/77.63  all VarCurr (v1179(VarCurr)<->v1180(VarCurr)&v1182(VarCurr)).
% 78.29/77.63  all VarCurr (v1182(VarCurr)<->v1183(VarCurr)&v1188(VarCurr)).
% 78.29/77.63  all VarCurr (-v1188(VarCurr)<->v1189(VarCurr)).
% 78.29/77.63  all VarCurr (v1189(VarCurr)<->v1176(VarCurr)|v1190(VarCurr)).
% 78.29/77.63  all VarCurr (v1190(VarCurr)<-> ($T<->v384(VarCurr,bitIndex4))).
% 78.29/77.63  all VarCurr (v1183(VarCurr)<->v1184(VarCurr)&v1187(VarCurr)).
% 78.29/77.63  all VarCurr (v1187(VarCurr)<-> ($T<->v384(VarCurr,bitIndex5))).
% 78.29/77.63  all VarCurr (v1184(VarCurr)<->v24(VarCurr)|v1185(VarCurr)).
% 78.29/77.63  all VarCurr (v1185(VarCurr)<->v768(VarCurr)&v1186(VarCurr)).
% 78.29/77.63  all VarCurr (-v1186(VarCurr)<->v24(VarCurr)).
% 78.29/77.63  all VarCurr (-v1180(VarCurr)<->v1181(VarCurr)).
% 78.29/77.63  all VarCurr (v1181(VarCurr)<->v1158(VarCurr)|v1174(VarCurr)).
% 78.29/77.64  all VarCurr (v1156(VarCurr)<->v1157(VarCurr)|v1159(VarCurr)).
% 78.29/77.64  all VarCurr (v1159(VarCurr)<->v1160(VarCurr)&v1161(VarCurr)).
% 78.29/77.64  all VarCurr (v1161(VarCurr)<->v1162(VarCurr)&v1175(VarCurr)).
% 78.29/77.64  all VarCurr (-v1175(VarCurr)<->v1176(VarCurr)).
% 78.29/77.64  all VarCurr (v1176(VarCurr)<->v1177(VarCurr)|v1178(VarCurr)).
% 78.29/77.64  all VarCurr (v1178(VarCurr)<-> ($T<->v384(VarCurr,bitIndex2))).
% 78.29/77.64  all VarCurr (v1177(VarCurr)<-> ($T<->v384(VarCurr,bitIndex1))).
% 78.29/77.64  all VarCurr (v1162(VarCurr)<->v1163(VarCurr)&v1174(VarCurr)).
% 78.29/77.64  all VarCurr (v1174(VarCurr)<-> ($T<->v384(VarCurr,bitIndex3))).
% 78.29/77.64  all VarCurr (v1163(VarCurr)<->v24(VarCurr)|v1164(VarCurr)).
% 78.29/77.64  all VarCurr (v1164(VarCurr)<->v1165(VarCurr)&v1173(VarCurr)).
% 78.29/77.64  all VarCurr (-v1173(VarCurr)<->v24(VarCurr)).
% 78.29/77.64  all VarCurr (v1165(VarCurr)<->v1166(VarCurr)&v1170(VarCurr)).
% 78.29/77.64  all VarCurr (-v1170(VarCurr)<->v1171(VarCurr)).
% 78.29/77.64  all VarCurr (v1171(VarCurr)<->v1172(VarCurr)&v1088(VarCurr)).
% 78.29/77.64  all VarCurr (v1172(VarCurr)<->v768(VarCurr)&v1083(VarCurr)).
% 78.29/77.64  all VarCurr (v1166(VarCurr)<->v1167(VarCurr)&v1169(VarCurr)).
% 78.29/77.64  all VarCurr (-v1169(VarCurr)<->v452(VarCurr)).
% 78.29/77.64  all VarCurr (v1167(VarCurr)<->v768(VarCurr)&v1168(VarCurr)).
% 78.29/77.64  all VarCurr (-v1168(VarCurr)<->v772(VarCurr)).
% 78.29/77.64  all VarCurr (-v1160(VarCurr)<->v1158(VarCurr)).
% 78.29/77.64  all VarCurr (v1157(VarCurr)<->v24(VarCurr)&v1158(VarCurr)).
% 78.29/77.64  all VarCurr (v1158(VarCurr)<-> ($T<->v384(VarCurr,bitIndex0))).
% 78.29/77.64  v384(constB0,bitIndex7)<->$F.
% 78.29/77.64  v384(constB0,bitIndex6)<->$F.
% 78.29/77.64  v384(constB0,bitIndex5)<->$F.
% 78.29/77.64  v384(constB0,bitIndex4)<->$F.
% 78.29/77.64  v384(constB0,bitIndex3)<->$F.
% 78.29/77.64  v384(constB0,bitIndex2)<->$F.
% 78.29/77.64  v384(constB0,bitIndex1)<->$F.
% 78.29/77.64  -b0000000(bitIndex6).
% 78.29/77.64  -b0000000(bitIndex5).
% 78.29/77.64  -b0000000(bitIndex4).
% 78.29/77.64  -b0000000(bitIndex3).
% 78.29/77.64  -b0000000(bitIndex2).
% 78.29/77.64  -b0000000(bitIndex1).
% 78.29/77.64  -b0000000(bitIndex0).
% 78.29/77.64  v384(constB0,bitIndex0)<->$T.
% 78.29/77.64  all VarCurr (v1139(VarCurr,bitIndex16)<->v1140(VarCurr,bitIndex16)).
% 78.29/77.64  all VarCurr (v1140(VarCurr,bitIndex0)<->v1152(VarCurr)).
% 78.29/77.64  all VarCurr (v1140(VarCurr,bitIndex1)<->v1151(VarCurr)).
% 78.29/77.64  all VarCurr (v1140(VarCurr,bitIndex2)<->v1150(VarCurr)).
% 78.29/77.64  all VarCurr (v1140(VarCurr,bitIndex3)<->v1149(VarCurr)).
% 78.29/77.64  all VarCurr (v1140(VarCurr,bitIndex4)<->v1148(VarCurr)).
% 78.29/77.64  all VarCurr ((v1140(VarCurr,bitIndex9)<->v1144(VarCurr,bitIndex4))& (v1140(VarCurr,bitIndex8)<->v1144(VarCurr,bitIndex3))& (v1140(VarCurr,bitIndex7)<->v1144(VarCurr,bitIndex2))& (v1140(VarCurr,bitIndex6)<->v1144(VarCurr,bitIndex1))& (v1140(VarCurr,bitIndex5)<->v1144(VarCurr,bitIndex0))).
% 78.29/77.64  all VarCurr ((v1140(VarCurr,bitIndex17)<->v1141(VarCurr,bitIndex7))& (v1140(VarCurr,bitIndex16)<->v1141(VarCurr,bitIndex6))& (v1140(VarCurr,bitIndex15)<->v1141(VarCurr,bitIndex5))& (v1140(VarCurr,bitIndex14)<->v1141(VarCurr,bitIndex4))& (v1140(VarCurr,bitIndex13)<->v1141(VarCurr,bitIndex3))& (v1140(VarCurr,bitIndex12)<->v1141(VarCurr,bitIndex2))& (v1140(VarCurr,bitIndex11)<->v1141(VarCurr,bitIndex1))& (v1140(VarCurr,bitIndex10)<->v1141(VarCurr,bitIndex0))).
% 78.29/77.64  all VarCurr (v1144(VarCurr,bitIndex0)<->v1147(VarCurr,bitIndex0)).
% 78.29/77.64  all VarCurr ((v1144(VarCurr,bitIndex2)<->$F)& (v1144(VarCurr,bitIndex1)<->$F)).
% 78.29/77.64  all VarCurr (v1144(VarCurr,bitIndex3)<->v1147(VarCurr,bitIndex3)).
% 78.29/77.64  all VarCurr (v1144(VarCurr,bitIndex4)<->v1146(VarCurr,bitIndex4)).
% 78.29/77.64  all VarCurr B (range_3_0(B)-> (v1141(VarCurr,B)<->v1143(VarCurr,B))).
% 78.29/77.64  all VarCurr ((v1141(VarCurr,bitIndex7)<->$F)& (v1141(VarCurr,bitIndex6)<->$F)& (v1141(VarCurr,bitIndex5)<->$F)& (v1141(VarCurr,bitIndex4)<->$F)).
% 78.29/77.64  all VarCurr (v1124(VarCurr,bitIndex16)<->v1125(VarCurr,bitIndex16)).
% 78.29/77.64  all VarCurr (v1125(VarCurr,bitIndex0)<->v1137(VarCurr)).
% 78.29/77.64  all VarCurr (v1125(VarCurr,bitIndex1)<->v1136(VarCurr)).
% 78.29/77.64  all VarCurr (v1125(VarCurr,bitIndex2)<->v1135(VarCurr)).
% 78.29/77.64  all VarCurr (v1125(VarCurr,bitIndex3)<->v1134(VarCurr)).
% 78.29/77.64  all VarCurr (v1125(VarCurr,bitIndex4)<->v1133(VarCurr)).
% 78.29/77.64  all VarCurr ((v1125(VarCurr,bitIndex9)<->v1129(VarCurr,bitIndex4))& (v1125(VarCurr,bitIndex8)<->v1129(VarCurr,bitIndex3))& (v1125(VarCurr,bitIndex7)<->v1129(VarCurr,bitIndex2))& (v1125(VarCurr,bitIndex6)<->v1129(VarCurr,bitIndex1))& (v1125(VarCurr,bitIndex5)<->v1129(VarCurr,bitIndex0))).
% 78.29/77.64  all VarCurr ((v1125(VarCurr,bitIndex17)<->v1126(VarCurr,bitIndex7))& (v1125(VarCurr,bitIndex16)<->v1126(VarCurr,bitIndex6))& (v1125(VarCurr,bitIndex15)<->v1126(VarCurr,bitIndex5))& (v1125(VarCurr,bitIndex14)<->v1126(VarCurr,bitIndex4))& (v1125(VarCurr,bitIndex13)<->v1126(VarCurr,bitIndex3))& (v1125(VarCurr,bitIndex12)<->v1126(VarCurr,bitIndex2))& (v1125(VarCurr,bitIndex11)<->v1126(VarCurr,bitIndex1))& (v1125(VarCurr,bitIndex10)<->v1126(VarCurr,bitIndex0))).
% 78.29/77.64  all VarCurr (v1129(VarCurr,bitIndex0)<->v1132(VarCurr,bitIndex0)).
% 78.29/77.64  all VarCurr ((v1129(VarCurr,bitIndex2)<->$F)& (v1129(VarCurr,bitIndex1)<->$F)).
% 78.29/77.64  all VarCurr (v1129(VarCurr,bitIndex3)<->v1132(VarCurr,bitIndex3)).
% 78.29/77.64  all VarCurr (v1129(VarCurr,bitIndex4)<->v1131(VarCurr,bitIndex4)).
% 78.29/77.64  all VarCurr B (range_3_0(B)-> (v1126(VarCurr,B)<->v1128(VarCurr,B))).
% 78.29/77.64  all VarCurr ((v1126(VarCurr,bitIndex7)<->$F)& (v1126(VarCurr,bitIndex6)<->$F)& (v1126(VarCurr,bitIndex5)<->$F)& (v1126(VarCurr,bitIndex4)<->$F)).
% 78.29/77.64  all VarCurr (v1109(VarCurr,bitIndex16)<->v1110(VarCurr,bitIndex16)).
% 78.29/77.64  all VarCurr (v1110(VarCurr,bitIndex0)<->v1122(VarCurr)).
% 78.29/77.64  all VarCurr (v1110(VarCurr,bitIndex1)<->v1121(VarCurr)).
% 78.29/77.64  all VarCurr (v1110(VarCurr,bitIndex2)<->v1120(VarCurr)).
% 78.29/77.64  all VarCurr (v1110(VarCurr,bitIndex3)<->v1119(VarCurr)).
% 78.29/77.64  all VarCurr (v1110(VarCurr,bitIndex4)<->v1118(VarCurr)).
% 78.29/77.64  all VarCurr ((v1110(VarCurr,bitIndex9)<->v1114(VarCurr,bitIndex4))& (v1110(VarCurr,bitIndex8)<->v1114(VarCurr,bitIndex3))& (v1110(VarCurr,bitIndex7)<->v1114(VarCurr,bitIndex2))& (v1110(VarCurr,bitIndex6)<->v1114(VarCurr,bitIndex1))& (v1110(VarCurr,bitIndex5)<->v1114(VarCurr,bitIndex0))).
% 78.29/77.64  all VarCurr ((v1110(VarCurr,bitIndex17)<->v1111(VarCurr,bitIndex7))& (v1110(VarCurr,bitIndex16)<->v1111(VarCurr,bitIndex6))& (v1110(VarCurr,bitIndex15)<->v1111(VarCurr,bitIndex5))& (v1110(VarCurr,bitIndex14)<->v1111(VarCurr,bitIndex4))& (v1110(VarCurr,bitIndex13)<->v1111(VarCurr,bitIndex3))& (v1110(VarCurr,bitIndex12)<->v1111(VarCurr,bitIndex2))& (v1110(VarCurr,bitIndex11)<->v1111(VarCurr,bitIndex1))& (v1110(VarCurr,bitIndex10)<->v1111(VarCurr,bitIndex0))).
% 78.29/77.64  all VarCurr (v1114(VarCurr,bitIndex0)<->v1117(VarCurr,bitIndex0)).
% 78.29/77.64  all VarCurr ((v1114(VarCurr,bitIndex2)<->$F)& (v1114(VarCurr,bitIndex1)<->$F)).
% 78.29/77.64  all VarCurr (v1114(VarCurr,bitIndex3)<->v1117(VarCurr,bitIndex3)).
% 78.29/77.64  all VarCurr (v1114(VarCurr,bitIndex4)<->v1116(VarCurr,bitIndex4)).
% 78.29/77.64  all VarCurr B (range_3_0(B)-> (v1111(VarCurr,B)<->v1113(VarCurr,B))).
% 78.29/77.64  all VarCurr ((v1111(VarCurr,bitIndex7)<->$F)& (v1111(VarCurr,bitIndex6)<->$F)& (v1111(VarCurr,bitIndex5)<->$F)& (v1111(VarCurr,bitIndex4)<->$F)).
% 78.29/77.64  all VarCurr (v1094(VarCurr,bitIndex16)<->v1095(VarCurr,bitIndex16)).
% 78.29/77.64  all VarCurr (v1095(VarCurr,bitIndex0)<->v1107(VarCurr)).
% 78.29/77.64  all VarCurr (v1095(VarCurr,bitIndex1)<->v1106(VarCurr)).
% 78.29/77.64  all VarCurr (v1095(VarCurr,bitIndex2)<->v1105(VarCurr)).
% 78.29/77.64  all VarCurr (v1095(VarCurr,bitIndex3)<->v1104(VarCurr)).
% 78.29/77.64  all VarCurr (v1095(VarCurr,bitIndex4)<->v1103(VarCurr)).
% 78.29/77.64  all VarCurr ((v1095(VarCurr,bitIndex9)<->v1099(VarCurr,bitIndex4))& (v1095(VarCurr,bitIndex8)<->v1099(VarCurr,bitIndex3))& (v1095(VarCurr,bitIndex7)<->v1099(VarCurr,bitIndex2))& (v1095(VarCurr,bitIndex6)<->v1099(VarCurr,bitIndex1))& (v1095(VarCurr,bitIndex5)<->v1099(VarCurr,bitIndex0))).
% 78.29/77.64  all VarCurr ((v1095(VarCurr,bitIndex17)<->v1096(VarCurr,bitIndex7))& (v1095(VarCurr,bitIndex16)<->v1096(VarCurr,bitIndex6))& (v1095(VarCurr,bitIndex15)<->v1096(VarCurr,bitIndex5))& (v1095(VarCurr,bitIndex14)<->v1096(VarCurr,bitIndex4))& (v1095(VarCurr,bitIndex13)<->v1096(VarCurr,bitIndex3))& (v1095(VarCurr,bitIndex12)<->v1096(VarCurr,bitIndex2))& (v1095(VarCurr,bitIndex11)<->v1096(VarCurr,bitIndex1))& (v1095(VarCurr,bitIndex10)<->v1096(VarCurr,bitIndex0))).
% 78.29/77.64  all VarCurr (v1099(VarCurr,bitIndex0)<->v1102(VarCurr,bitIndex0)).
% 78.29/77.64  all VarCurr ((v1099(VarCurr,bitIndex2)<->$F)& (v1099(VarCurr,bitIndex1)<->$F)).
% 78.29/77.64  all VarCurr (v1099(VarCurr,bitIndex3)<->v1102(VarCurr,bitIndex3)).
% 78.29/77.64  all VarCurr (v1099(VarCurr,bitIndex4)<->v1101(VarCurr,bitIndex4)).
% 78.29/77.64  all VarCurr B (range_3_0(B)-> (v1096(VarCurr,B)<->v1098(VarCurr,B))).
% 78.29/77.64  all VarCurr ((v1096(VarCurr,bitIndex7)<->$F)& (v1096(VarCurr,bitIndex6)<->$F)& (v1096(VarCurr,bitIndex5)<->$F)& (v1096(VarCurr,bitIndex4)<->$F)).
% 78.29/77.64  all VarCurr (v1088(VarCurr)<->v1092(VarCurr)&v1090(VarCurr)).
% 78.29/77.64  all VarCurr (-v1092(VarCurr)<->v338(VarCurr)).
% 78.29/77.64  all VarCurr (v1090(VarCurr)<->v320(VarCurr)).
% 78.29/77.64  all VarCurr (-v1083(VarCurr)<->v1085(VarCurr)).
% 78.29/77.64  all VarCurr (v1085(VarCurr)<->v380(VarCurr,bitIndex0)|v380(VarCurr,bitIndex1)).
% 78.29/77.64  all VarCurr (v768(VarCurr)<->v1078(VarCurr)|v770(VarCurr,bitIndex5)).
% 78.29/77.64  all VarCurr (v1078(VarCurr)<->v1079(VarCurr)|v770(VarCurr,bitIndex4)).
% 78.29/77.64  all VarCurr (v1079(VarCurr)<->v1080(VarCurr)|v770(VarCurr,bitIndex3)).
% 78.29/77.64  all VarCurr (v1080(VarCurr)<->v1081(VarCurr)|v770(VarCurr,bitIndex2)).
% 78.29/77.64  all VarCurr (v1081(VarCurr)<->v770(VarCurr,bitIndex0)|v770(VarCurr,bitIndex1)).
% 78.29/77.64  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v981(VarNext)-> (all B (range_5_0(B)-> (v770(VarNext,B)<->v770(VarCurr,B)))))).
% 78.29/77.64  all VarNext (v981(VarNext)-> (all B (range_5_0(B)-> (v770(VarNext,B)<->v1002(VarNext,B))))).
% 78.29/77.64  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_5_0(B)-> (v1002(VarNext,B)<->v1000(VarCurr,B))))).
% 78.29/77.64  all VarCurr (-v990(VarCurr)-> (all B (range_5_0(B)-> (v1000(VarCurr,B)<->v1003(VarCurr,B))))).
% 78.29/77.64  all VarCurr (v990(VarCurr)-> (all B (range_5_0(B)-> (v1000(VarCurr,B)<->$F)))).
% 78.29/77.64  all VarCurr (-v993(VarCurr)-> (all B (range_5_0(B)-> (v1003(VarCurr,B)<->v1039(VarCurr,B))))).
% 78.29/77.64  all VarCurr (v993(VarCurr)-> (all B (range_5_0(B)-> (v1003(VarCurr,B)<->v1004(VarCurr,B))))).
% 78.29/77.64  all VarCurr (v1039(VarCurr,bitIndex0)<->v1072(VarCurr)).
% 78.29/77.64  all VarCurr (v1039(VarCurr,bitIndex1)<->v1070(VarCurr)).
% 78.29/77.64  all VarCurr (v1039(VarCurr,bitIndex2)<->v1066(VarCurr)).
% 78.29/77.64  all VarCurr (v1039(VarCurr,bitIndex3)<->v1062(VarCurr)).
% 78.29/77.64  all VarCurr (v1039(VarCurr,bitIndex4)<->v1058(VarCurr)).
% 78.29/77.64  all VarCurr (v1039(VarCurr,bitIndex5)<->v1041(VarCurr)).
% 78.29/77.64  all VarCurr (v1070(VarCurr)<->v1071(VarCurr)&v1073(VarCurr)).
% 78.29/77.64  all VarCurr (v1073(VarCurr)<->v770(VarCurr,bitIndex0)|v1052(VarCurr)).
% 78.29/77.64  all VarCurr (v1071(VarCurr)<->v1072(VarCurr)|v770(VarCurr,bitIndex1)).
% 78.29/77.64  all VarCurr (-v1072(VarCurr)<->v770(VarCurr,bitIndex0)).
% 78.29/77.64  all VarCurr (v1066(VarCurr)<->v1067(VarCurr)&v1069(VarCurr)).
% 78.29/77.64  all VarCurr (v1069(VarCurr)<->v1050(VarCurr)|v1053(VarCurr)).
% 78.29/77.64  all VarCurr (v1067(VarCurr)<->v1068(VarCurr)|v770(VarCurr,bitIndex2)).
% 78.29/77.64  all VarCurr (-v1068(VarCurr)<->v1050(VarCurr)).
% 78.29/77.64  all VarCurr (v1062(VarCurr)<->v1063(VarCurr)&v1065(VarCurr)).
% 78.29/77.64  all VarCurr (v1065(VarCurr)<->v1048(VarCurr)|v1054(VarCurr)).
% 78.29/77.64  all VarCurr (v1063(VarCurr)<->v1064(VarCurr)|v770(VarCurr,bitIndex3)).
% 78.29/77.64  all VarCurr (-v1064(VarCurr)<->v1048(VarCurr)).
% 78.29/77.64  all VarCurr (v1058(VarCurr)<->v1059(VarCurr)&v1061(VarCurr)).
% 78.29/77.64  all VarCurr (v1061(VarCurr)<->v1046(VarCurr)|v1055(VarCurr)).
% 78.29/77.64  all VarCurr (v1059(VarCurr)<->v1060(VarCurr)|v770(VarCurr,bitIndex4)).
% 78.29/77.64  all VarCurr (-v1060(VarCurr)<->v1046(VarCurr)).
% 78.29/77.64  all VarCurr (v1041(VarCurr)<->v1042(VarCurr)&v1056(VarCurr)).
% 78.29/77.64  all VarCurr (v1056(VarCurr)<->v1044(VarCurr)|v1057(VarCurr)).
% 78.29/77.64  all VarCurr (-v1057(VarCurr)<->v770(VarCurr,bitIndex5)).
% 78.29/77.64  all VarCurr (v1042(VarCurr)<->v1043(VarCurr)|v770(VarCurr,bitIndex5)).
% 78.29/77.64  all VarCurr (-v1043(VarCurr)<->v1044(VarCurr)).
% 78.29/77.64  all VarCurr (v1044(VarCurr)<->v770(VarCurr,bitIndex4)|v1045(VarCurr)).
% 78.29/77.64  all VarCurr (v1045(VarCurr)<->v1046(VarCurr)&v1055(VarCurr)).
% 78.29/77.64  all VarCurr (-v1055(VarCurr)<->v770(VarCurr,bitIndex4)).
% 78.29/77.64  all VarCurr (v1046(VarCurr)<->v770(VarCurr,bitIndex3)|v1047(VarCurr)).
% 78.29/77.64  all VarCurr (v1047(VarCurr)<->v1048(VarCurr)&v1054(VarCurr)).
% 78.29/77.64  all VarCurr (-v1054(VarCurr)<->v770(VarCurr,bitIndex3)).
% 78.29/77.64  all VarCurr (v1048(VarCurr)<->v770(VarCurr,bitIndex2)|v1049(VarCurr)).
% 78.29/77.64  all VarCurr (v1049(VarCurr)<->v1050(VarCurr)&v1053(VarCurr)).
% 78.29/77.64  all VarCurr (-v1053(VarCurr)<->v770(VarCurr,bitIndex2)).
% 78.29/77.64  all VarCurr (v1050(VarCurr)<->v770(VarCurr,bitIndex1)|v1051(VarCurr)).
% 78.29/77.64  all VarCurr (v1051(VarCurr)<->v770(VarCurr,bitIndex0)&v1052(VarCurr)).
% 78.29/77.64  all VarCurr (-v1052(VarCurr)<->v770(VarCurr,bitIndex1)).
% 78.29/77.64  all VarCurr (v1004(VarCurr,bitIndex0)<->v1037(VarCurr)).
% 78.29/77.64  all VarCurr (v1004(VarCurr,bitIndex1)<->v1035(VarCurr)).
% 78.29/77.64  all VarCurr (v1004(VarCurr,bitIndex2)<->v1031(VarCurr)).
% 78.29/77.64  all VarCurr (v1004(VarCurr,bitIndex3)<->v1027(VarCurr)).
% 78.29/77.64  all VarCurr (v1004(VarCurr,bitIndex4)<->v1023(VarCurr)).
% 78.29/77.64  all VarCurr (v1004(VarCurr,bitIndex5)<->v1006(VarCurr)).
% 78.29/77.64  all VarCurr (v1035(VarCurr)<->v1036(VarCurr)&v1038(VarCurr)).
% 78.29/77.65  all VarCurr (v1038(VarCurr)<->v775(VarCurr,bitIndex0)|v1017(VarCurr)).
% 78.29/77.65  all VarCurr (v1036(VarCurr)<->v1037(VarCurr)|v775(VarCurr,bitIndex1)).
% 78.29/77.65  all VarCurr (-v1037(VarCurr)<->v775(VarCurr,bitIndex0)).
% 78.29/77.65  all VarCurr (v1031(VarCurr)<->v1032(VarCurr)&v1034(VarCurr)).
% 78.29/77.65  all VarCurr (v1034(VarCurr)<->v1015(VarCurr)|v1018(VarCurr)).
% 78.29/77.65  all VarCurr (v1032(VarCurr)<->v1033(VarCurr)|v775(VarCurr,bitIndex2)).
% 78.29/77.65  all VarCurr (-v1033(VarCurr)<->v1015(VarCurr)).
% 78.29/77.65  all VarCurr (v1027(VarCurr)<->v1028(VarCurr)&v1030(VarCurr)).
% 78.29/77.65  all VarCurr (v1030(VarCurr)<->v1013(VarCurr)|v1019(VarCurr)).
% 78.29/77.65  all VarCurr (v1028(VarCurr)<->v1029(VarCurr)|v775(VarCurr,bitIndex3)).
% 78.29/77.65  all VarCurr (-v1029(VarCurr)<->v1013(VarCurr)).
% 78.29/77.65  all VarCurr (v1023(VarCurr)<->v1024(VarCurr)&v1026(VarCurr)).
% 78.29/77.65  all VarCurr (v1026(VarCurr)<->v1011(VarCurr)|v1020(VarCurr)).
% 78.29/77.65  all VarCurr (v1024(VarCurr)<->v1025(VarCurr)|v775(VarCurr,bitIndex4)).
% 78.29/77.65  all VarCurr (-v1025(VarCurr)<->v1011(VarCurr)).
% 78.29/77.65  all VarCurr (v1006(VarCurr)<->v1007(VarCurr)&v1021(VarCurr)).
% 78.29/77.65  all VarCurr (v1021(VarCurr)<->v1009(VarCurr)|v1022(VarCurr)).
% 78.29/77.65  all VarCurr (-v1022(VarCurr)<->v775(VarCurr,bitIndex5)).
% 78.29/77.65  all VarCurr (v1007(VarCurr)<->v1008(VarCurr)|v775(VarCurr,bitIndex5)).
% 78.29/77.65  all VarCurr (-v1008(VarCurr)<->v1009(VarCurr)).
% 78.29/77.65  all VarCurr (v1009(VarCurr)<->v775(VarCurr,bitIndex4)|v1010(VarCurr)).
% 78.29/77.65  all VarCurr (v1010(VarCurr)<->v1011(VarCurr)&v1020(VarCurr)).
% 78.29/77.65  all VarCurr (-v1020(VarCurr)<->v775(VarCurr,bitIndex4)).
% 78.29/77.65  all VarCurr (v1011(VarCurr)<->v775(VarCurr,bitIndex3)|v1012(VarCurr)).
% 78.29/77.65  all VarCurr (v1012(VarCurr)<->v1013(VarCurr)&v1019(VarCurr)).
% 78.29/77.65  all VarCurr (-v1019(VarCurr)<->v775(VarCurr,bitIndex3)).
% 78.29/77.65  all VarCurr (v1013(VarCurr)<->v775(VarCurr,bitIndex2)|v1014(VarCurr)).
% 78.29/77.65  all VarCurr (v1014(VarCurr)<->v1015(VarCurr)&v1018(VarCurr)).
% 78.29/77.65  all VarCurr (-v1018(VarCurr)<->v775(VarCurr,bitIndex2)).
% 78.29/77.65  all VarCurr (v1015(VarCurr)<->v775(VarCurr,bitIndex1)|v1016(VarCurr)).
% 78.29/77.65  all VarCurr (v1016(VarCurr)<->v775(VarCurr,bitIndex0)&v1017(VarCurr)).
% 78.29/77.65  all VarCurr (-v1017(VarCurr)<->v775(VarCurr,bitIndex1)).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v981(VarNext)<->v982(VarNext)&v989(VarNext))).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v989(VarNext)<->v987(VarCurr))).
% 78.29/77.65  all VarCurr (v987(VarCurr)<->v990(VarCurr)|v991(VarCurr)).
% 78.29/77.65  all VarCurr (v991(VarCurr)<->v992(VarCurr)&v999(VarCurr)).
% 78.29/77.65  all VarCurr (-v999(VarCurr)<->v990(VarCurr)).
% 78.29/77.65  all VarCurr (v992(VarCurr)<->v993(VarCurr)|v995(VarCurr)).
% 78.29/77.65  all VarCurr (v995(VarCurr)<->v996(VarCurr)&v998(VarCurr)).
% 78.29/77.65  all VarCurr (-v998(VarCurr)<->v993(VarCurr)).
% 78.29/77.65  all VarCurr (v996(VarCurr)<->v382(VarCurr)&v997(VarCurr)).
% 78.29/77.65  all VarCurr (-v997(VarCurr)<->v772(VarCurr)).
% 78.29/77.65  all VarCurr (v993(VarCurr)<->v24(VarCurr)&v994(VarCurr)).
% 78.29/77.65  all VarCurr (-v994(VarCurr)<->v340(VarCurr)).
% 78.29/77.65  all VarCurr (-v990(VarCurr)<->v15(VarCurr)).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v982(VarNext)<->v983(VarNext)&v355(VarNext))).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v983(VarNext)<->v362(VarNext))).
% 78.29/77.65  all B (range_5_0(B)-> (v770(constB0,B)<->$F)).
% 78.29/77.65  all VarCurr (v775(VarCurr,bitIndex5)<->v776(VarCurr,bitIndex5)).
% 78.29/77.65  all VarCurr (v777(VarCurr,bitIndex3)<->v911(VarCurr,bitIndex3)).
% 78.29/77.65  all VarCurr (v784(VarCurr,bitIndex7)<->v801(VarCurr,bitIndex7)).
% 78.29/77.65  all VarCurr (v460(VarCurr,bitIndex7)<->v462(VarCurr,bitIndex7)).
% 78.29/77.65  all VarCurr (v462(VarCurr,bitIndex7)<->v464(VarCurr,bitIndex7)).
% 78.29/77.65  all VarCurr (v464(VarCurr,bitIndex7)<->v466(VarCurr,bitIndex7)).
% 78.29/77.65  all VarCurr (v466(VarCurr,bitIndex7)<->v42(VarCurr,bitIndex101)).
% 78.29/77.65  all VarCurr (v42(VarCurr,bitIndex101)<->v44(VarCurr,bitIndex101)).
% 78.29/77.65  all VarCurr (v44(VarCurr,bitIndex101)<->v46(VarCurr,bitIndex101)).
% 78.29/77.65  all VarCurr (v46(VarCurr,bitIndex101)<->v48(VarCurr,bitIndex681)).
% 78.29/77.65  all VarNext (v48(VarNext,bitIndex681)<->v972(VarNext,bitIndex101)).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v974(VarNext)-> (v972(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v972(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v972(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v972(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v972(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v972(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v972(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v972(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v972(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v972(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v972(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v972(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v972(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v972(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v972(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v972(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v972(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v972(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v972(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v972(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v972(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v972(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v972(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v972(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v972(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v972(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v972(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v972(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v972(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v972(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v972(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v972(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v972(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v972(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v972(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v972(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v972(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v972(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v972(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v972(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v972(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v972(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v972(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v972(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v972(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v972(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v972(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v972(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v972(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v972(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v972(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v972(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v972(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v972(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v972(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v972(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v972(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v972(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v972(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v972(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v972(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v972(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v972(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v972(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v972(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v972(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v972(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v972(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v972(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v972(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v972(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v972(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v972(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v972(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v972(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v972(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v972(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v972(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v972(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v972(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v972(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v972(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v972(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v972(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v972(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v972(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v972(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v972(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v972(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v972(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v972(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v972(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v972(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v972(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v972(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v972(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v972(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v972(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v972(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v972(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v972(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v972(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v972(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v972(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v972(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v972(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v972(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v972(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v972(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v972(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v972(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v972(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v972(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v972(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v972(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v972(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.29/77.65  all VarNext (v974(VarNext)-> (all B (range_115_0(B)-> (v972(VarNext,B)<->v238(VarNext,B))))).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v974(VarNext)<->v975(VarNext)&v233(VarNext))).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v975(VarNext)<->v977(VarNext)&v188(VarNext))).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v977(VarNext)<->v207(VarNext))).
% 78.29/77.65  all VarNext (v48(VarNext,bitIndex565)<->v964(VarNext,bitIndex101)).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v966(VarNext)-> (v964(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v964(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v964(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v964(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v964(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v964(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v964(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v964(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v964(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v964(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v964(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v964(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v964(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v964(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v964(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v964(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v964(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v964(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v964(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v964(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v964(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v964(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v964(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v964(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v964(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v964(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v964(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v964(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v964(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v964(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v964(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v964(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v964(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v964(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v964(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v964(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v964(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v964(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v964(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v964(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v964(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v964(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v964(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v964(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v964(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v964(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v964(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v964(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v964(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v964(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v964(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v964(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v964(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v964(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v964(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v964(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v964(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v964(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v964(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v964(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v964(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v964(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v964(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v964(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v964(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v964(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v964(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v964(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v964(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v964(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v964(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v964(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v964(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v964(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v964(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v964(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v964(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v964(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v964(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v964(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v964(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v964(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v964(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v964(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v964(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v964(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v964(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v964(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v964(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v964(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v964(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v964(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v964(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v964(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v964(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v964(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v964(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v964(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v964(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v964(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v964(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v964(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v964(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v964(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v964(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v964(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v964(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v964(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v964(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v964(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v964(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v964(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v964(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v964(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v964(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v964(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.29/77.65  all VarNext (v966(VarNext)-> (all B (range_115_0(B)-> (v964(VarNext,B)<->v219(VarNext,B))))).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v966(VarNext)<->v967(VarNext)&v213(VarNext))).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v967(VarNext)<->v969(VarNext)&v188(VarNext))).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v969(VarNext)<->v207(VarNext))).
% 78.29/77.65  all VarCurr (v180(VarCurr,bitIndex101)<->v182(VarCurr,bitIndex101)).
% 78.29/77.65  all VarCurr (v182(VarCurr,bitIndex101)<->v184(VarCurr,bitIndex101)).
% 78.29/77.65  all VarCurr (v184(VarCurr,bitIndex101)<->v186(VarCurr,bitIndex101)).
% 78.29/77.65  all VarCurr (v775(VarCurr,bitIndex4)<->v776(VarCurr,bitIndex4)).
% 78.29/77.65  all VarCurr (v777(VarCurr,bitIndex2)<->v911(VarCurr,bitIndex2)).
% 78.29/77.65  all VarCurr (v784(VarCurr,bitIndex6)<->v801(VarCurr,bitIndex6)).
% 78.29/77.65  all VarCurr (v460(VarCurr,bitIndex6)<->v462(VarCurr,bitIndex6)).
% 78.29/77.65  all VarCurr (v462(VarCurr,bitIndex6)<->v464(VarCurr,bitIndex6)).
% 78.29/77.65  all VarCurr (v464(VarCurr,bitIndex6)<->v466(VarCurr,bitIndex6)).
% 78.29/77.65  all VarCurr (v466(VarCurr,bitIndex6)<->v42(VarCurr,bitIndex100)).
% 78.29/77.65  all VarCurr (v42(VarCurr,bitIndex100)<->v44(VarCurr,bitIndex100)).
% 78.29/77.65  all VarCurr (v44(VarCurr,bitIndex100)<->v46(VarCurr,bitIndex100)).
% 78.29/77.65  all VarCurr (v46(VarCurr,bitIndex100)<->v48(VarCurr,bitIndex680)).
% 78.29/77.65  all VarNext (v48(VarNext,bitIndex680)<->v956(VarNext,bitIndex100)).
% 78.29/77.65  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v958(VarNext)-> (v956(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v956(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v956(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v956(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v956(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v956(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v956(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v956(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v956(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v956(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v956(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v956(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v956(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v956(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v956(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v956(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v956(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v956(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v956(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v956(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v956(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v956(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v956(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v956(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v956(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v956(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v956(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v956(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v956(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v956(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v956(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v956(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v956(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v956(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v956(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v956(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v956(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v956(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v956(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v956(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v956(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v956(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v956(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v956(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v956(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v956(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v956(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v956(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v956(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v956(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v956(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v956(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v956(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v956(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v956(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v956(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v956(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v956(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v956(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v956(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v956(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v956(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v956(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v956(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v956(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v956(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v956(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v956(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v956(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v956(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v956(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v956(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v956(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v956(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v956(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v956(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v956(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v956(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v956(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v956(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v956(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v956(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v956(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v956(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v956(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v956(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v956(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v956(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v956(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v956(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v956(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v956(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v956(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v956(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v956(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v956(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v956(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v956(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v956(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v956(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v956(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v956(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v956(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v956(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v956(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v956(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v956(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v956(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v956(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v956(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v956(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v956(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v956(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v956(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v956(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v956(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.29/77.66  all VarNext (v958(VarNext)-> (all B (range_115_0(B)-> (v956(VarNext,B)<->v238(VarNext,B))))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v958(VarNext)<->v959(VarNext)&v233(VarNext))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v959(VarNext)<->v961(VarNext)&v188(VarNext))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v961(VarNext)<->v207(VarNext))).
% 78.29/77.66  all VarNext (v48(VarNext,bitIndex564)<->v948(VarNext,bitIndex100)).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v950(VarNext)-> (v948(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v948(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v948(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v948(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v948(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v948(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v948(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v948(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v948(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v948(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v948(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v948(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v948(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v948(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v948(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v948(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v948(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v948(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v948(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v948(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v948(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v948(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v948(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v948(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v948(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v948(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v948(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v948(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v948(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v948(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v948(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v948(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v948(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v948(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v948(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v948(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v948(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v948(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v948(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v948(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v948(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v948(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v948(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v948(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v948(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v948(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v948(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v948(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v948(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v948(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v948(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v948(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v948(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v948(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v948(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v948(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v948(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v948(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v948(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v948(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v948(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v948(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v948(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v948(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v948(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v948(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v948(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v948(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v948(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v948(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v948(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v948(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v948(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v948(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v948(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v948(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v948(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v948(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v948(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v948(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v948(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v948(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v948(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v948(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v948(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v948(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v948(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v948(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v948(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v948(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v948(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v948(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v948(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v948(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v948(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v948(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v948(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v948(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v948(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v948(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v948(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v948(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v948(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v948(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v948(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v948(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v948(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v948(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v948(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v948(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v948(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v948(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v948(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v948(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v948(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v948(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.29/77.66  all VarNext (v950(VarNext)-> (all B (range_115_0(B)-> (v948(VarNext,B)<->v219(VarNext,B))))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v950(VarNext)<->v951(VarNext)&v213(VarNext))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v951(VarNext)<->v953(VarNext)&v188(VarNext))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v953(VarNext)<->v207(VarNext))).
% 78.29/77.66  all VarCurr (v180(VarCurr,bitIndex100)<->v182(VarCurr,bitIndex100)).
% 78.29/77.66  all VarCurr (v182(VarCurr,bitIndex100)<->v184(VarCurr,bitIndex100)).
% 78.29/77.66  all VarCurr (v184(VarCurr,bitIndex100)<->v186(VarCurr,bitIndex100)).
% 78.29/77.66  all VarCurr (v775(VarCurr,bitIndex3)<->v776(VarCurr,bitIndex3)).
% 78.29/77.66  all VarCurr (v777(VarCurr,bitIndex1)<->v911(VarCurr,bitIndex1)).
% 78.29/77.66  all VarCurr (v784(VarCurr,bitIndex5)<->v801(VarCurr,bitIndex5)).
% 78.29/77.66  all VarCurr (v460(VarCurr,bitIndex5)<->v462(VarCurr,bitIndex5)).
% 78.29/77.66  all VarCurr (v462(VarCurr,bitIndex5)<->v464(VarCurr,bitIndex5)).
% 78.29/77.66  all VarCurr (v464(VarCurr,bitIndex5)<->v466(VarCurr,bitIndex5)).
% 78.29/77.66  all VarCurr (v466(VarCurr,bitIndex5)<->v42(VarCurr,bitIndex99)).
% 78.29/77.66  all VarCurr (v42(VarCurr,bitIndex99)<->v44(VarCurr,bitIndex99)).
% 78.29/77.66  all VarCurr (v44(VarCurr,bitIndex99)<->v46(VarCurr,bitIndex99)).
% 78.29/77.66  all VarCurr (v46(VarCurr,bitIndex99)<->v48(VarCurr,bitIndex679)).
% 78.29/77.66  all VarNext (v48(VarNext,bitIndex679)<->v940(VarNext,bitIndex99)).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v942(VarNext)-> (v940(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v940(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v940(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v940(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v940(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v940(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v940(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v940(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v940(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v940(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v940(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v940(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v940(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v940(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v940(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v940(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v940(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v940(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v940(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v940(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v940(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v940(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v940(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v940(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v940(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v940(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v940(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v940(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v940(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v940(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v940(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v940(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v940(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v940(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v940(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v940(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v940(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v940(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v940(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v940(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v940(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v940(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v940(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v940(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v940(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v940(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v940(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v940(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v940(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v940(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v940(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v940(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v940(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v940(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v940(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v940(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v940(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v940(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v940(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v940(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v940(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v940(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v940(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v940(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v940(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v940(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v940(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v940(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v940(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v940(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v940(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v940(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v940(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v940(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v940(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v940(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v940(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v940(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v940(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v940(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v940(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v940(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v940(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v940(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v940(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v940(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v940(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v940(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v940(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v940(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v940(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v940(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v940(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v940(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v940(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v940(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v940(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v940(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v940(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v940(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v940(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v940(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v940(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v940(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v940(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v940(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v940(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v940(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v940(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v940(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v940(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v940(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v940(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v940(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v940(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v940(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.29/77.66  all VarNext (v942(VarNext)-> (all B (range_115_0(B)-> (v940(VarNext,B)<->v238(VarNext,B))))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v942(VarNext)<->v943(VarNext)&v233(VarNext))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v943(VarNext)<->v945(VarNext)&v188(VarNext))).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v945(VarNext)<->v207(VarNext))).
% 78.29/77.66  all VarNext (v48(VarNext,bitIndex563)<->v932(VarNext,bitIndex99)).
% 78.29/77.66  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v934(VarNext)-> (v932(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v932(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v932(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v932(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v932(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v932(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v932(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v932(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v932(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v932(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v932(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v932(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v932(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v932(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v932(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v932(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v932(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v932(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v932(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v932(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v932(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v932(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v932(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v932(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v932(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v932(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v932(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v932(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v932(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v932(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v932(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v932(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v932(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v932(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v932(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v932(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v932(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v932(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v932(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v932(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v932(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v932(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v932(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v932(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v932(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v932(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v932(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v932(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v932(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v932(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v932(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v932(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v932(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v932(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v932(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v932(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v932(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v932(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v932(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v932(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v932(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v932(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v932(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v932(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v932(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v932(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v932(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v932(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v932(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v932(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v932(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v932(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v932(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v932(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v932(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v932(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v932(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v932(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v932(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v932(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v932(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v932(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v932(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v932(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v932(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v932(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v932(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v932(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v932(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v932(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v932(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v932(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v932(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v932(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v932(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v932(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v932(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v932(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v932(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v932(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v932(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v932(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v932(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v932(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v932(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v932(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v932(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v932(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v932(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v932(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v932(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v932(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v932(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v932(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v932(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v932(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.29/77.67  all VarNext (v934(VarNext)-> (all B (range_115_0(B)-> (v932(VarNext,B)<->v219(VarNext,B))))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v934(VarNext)<->v935(VarNext)&v213(VarNext))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v935(VarNext)<->v937(VarNext)&v188(VarNext))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v937(VarNext)<->v207(VarNext))).
% 78.29/77.67  all VarCurr (v180(VarCurr,bitIndex99)<->v182(VarCurr,bitIndex99)).
% 78.29/77.67  all VarCurr (v182(VarCurr,bitIndex99)<->v184(VarCurr,bitIndex99)).
% 78.29/77.67  all VarCurr (v184(VarCurr,bitIndex99)<->v186(VarCurr,bitIndex99)).
% 78.29/77.67  all VarCurr (v775(VarCurr,bitIndex2)<->v776(VarCurr,bitIndex2)).
% 78.29/77.67  all VarCurr (v777(VarCurr,bitIndex0)<->v911(VarCurr,bitIndex0)).
% 78.29/77.67  all VarCurr (-v780(VarCurr)-> (v911(VarCurr,bitIndex3)<->v784(VarCurr,bitIndex7))& (v911(VarCurr,bitIndex2)<->v784(VarCurr,bitIndex6))& (v911(VarCurr,bitIndex1)<->v784(VarCurr,bitIndex5))& (v911(VarCurr,bitIndex0)<->v784(VarCurr,bitIndex4))).
% 78.29/77.67  all VarCurr (v780(VarCurr)-> (all B (range_3_0(B)-> (v911(VarCurr,B)<->v912(VarCurr,B))))).
% 78.29/77.67  all VarCurr (v912(VarCurr,bitIndex0)<->v928(VarCurr)).
% 78.29/77.67  all VarCurr (v912(VarCurr,bitIndex1)<->v926(VarCurr)).
% 78.29/77.67  all VarCurr (v912(VarCurr,bitIndex2)<->v921(VarCurr)).
% 78.29/77.67  all VarCurr (v912(VarCurr,bitIndex3)<->v914(VarCurr)).
% 78.29/77.67  all VarCurr (v926(VarCurr)<->v927(VarCurr)&v930(VarCurr)).
% 78.29/77.67  all VarCurr (v930(VarCurr)<->v784(VarCurr,bitIndex4)|v784(VarCurr,bitIndex5)).
% 78.29/77.67  all VarCurr (v927(VarCurr)<->v928(VarCurr)|v929(VarCurr)).
% 78.29/77.67  all VarCurr (-v929(VarCurr)<->v784(VarCurr,bitIndex5)).
% 78.29/77.67  all VarCurr (-v928(VarCurr)<->v784(VarCurr,bitIndex4)).
% 78.29/77.67  all VarCurr (v921(VarCurr)<->v922(VarCurr)&v925(VarCurr)).
% 78.29/77.67  all VarCurr (v925(VarCurr)<->v918(VarCurr)|v784(VarCurr,bitIndex6)).
% 78.29/77.67  all VarCurr (v922(VarCurr)<->v923(VarCurr)|v924(VarCurr)).
% 78.29/77.67  all VarCurr (-v924(VarCurr)<->v784(VarCurr,bitIndex6)).
% 78.29/77.67  all VarCurr (-v923(VarCurr)<->v918(VarCurr)).
% 78.29/77.67  all VarCurr (v914(VarCurr)<->v915(VarCurr)&v920(VarCurr)).
% 78.29/77.67  all VarCurr (v920(VarCurr)<->v917(VarCurr)|v784(VarCurr,bitIndex7)).
% 78.29/77.67  all VarCurr (v915(VarCurr)<->v916(VarCurr)|v919(VarCurr)).
% 78.29/77.67  all VarCurr (-v919(VarCurr)<->v784(VarCurr,bitIndex7)).
% 78.29/77.67  all VarCurr (-v916(VarCurr)<->v917(VarCurr)).
% 78.29/77.67  all VarCurr (v917(VarCurr)<->v918(VarCurr)&v784(VarCurr,bitIndex6)).
% 78.29/77.67  all VarCurr (v918(VarCurr)<->v784(VarCurr,bitIndex4)&v784(VarCurr,bitIndex5)).
% 78.29/77.67  all VarCurr (v784(VarCurr,bitIndex4)<->v801(VarCurr,bitIndex4)).
% 78.29/77.67  all VarCurr (v460(VarCurr,bitIndex4)<->v462(VarCurr,bitIndex4)).
% 78.29/77.67  all VarCurr (v462(VarCurr,bitIndex4)<->v464(VarCurr,bitIndex4)).
% 78.29/77.67  all VarCurr (v464(VarCurr,bitIndex4)<->v466(VarCurr,bitIndex4)).
% 78.29/77.67  all VarCurr (v466(VarCurr,bitIndex4)<->v42(VarCurr,bitIndex98)).
% 78.29/77.67  all VarCurr (v42(VarCurr,bitIndex98)<->v44(VarCurr,bitIndex98)).
% 78.29/77.67  all VarCurr (v44(VarCurr,bitIndex98)<->v46(VarCurr,bitIndex98)).
% 78.29/77.67  all VarCurr (v46(VarCurr,bitIndex98)<->v48(VarCurr,bitIndex678)).
% 78.29/77.67  all VarNext (v48(VarNext,bitIndex678)<->v903(VarNext,bitIndex98)).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v905(VarNext)-> (v903(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v903(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v903(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v903(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v903(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v903(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v903(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v903(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v903(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v903(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v903(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v903(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v903(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v903(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v903(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v903(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v903(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v903(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v903(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v903(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v903(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v903(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v903(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v903(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v903(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v903(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v903(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v903(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v903(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v903(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v903(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v903(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v903(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v903(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v903(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v903(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v903(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v903(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v903(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v903(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v903(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v903(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v903(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v903(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v903(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v903(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v903(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v903(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v903(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v903(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v903(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v903(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v903(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v903(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v903(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v903(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v903(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v903(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v903(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v903(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v903(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v903(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v903(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v903(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v903(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v903(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v903(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v903(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v903(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v903(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v903(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v903(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v903(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v903(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v903(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v903(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v903(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v903(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v903(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v903(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v903(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v903(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v903(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v903(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v903(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v903(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v903(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v903(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v903(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v903(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v903(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v903(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v903(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v903(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v903(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v903(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v903(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v903(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v903(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v903(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v903(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v903(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v903(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v903(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v903(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v903(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v903(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v903(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v903(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v903(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v903(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v903(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v903(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v903(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v903(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v903(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.29/77.67  all VarNext (v905(VarNext)-> (all B (range_115_0(B)-> (v903(VarNext,B)<->v238(VarNext,B))))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v905(VarNext)<->v906(VarNext)&v233(VarNext))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v906(VarNext)<->v908(VarNext)&v188(VarNext))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v908(VarNext)<->v207(VarNext))).
% 78.29/77.67  all VarNext (v48(VarNext,bitIndex562)<->v895(VarNext,bitIndex98)).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v897(VarNext)-> (v895(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v895(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v895(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v895(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v895(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v895(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v895(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v895(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v895(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v895(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v895(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v895(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v895(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v895(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v895(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v895(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v895(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v895(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v895(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v895(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v895(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v895(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v895(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v895(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v895(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v895(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v895(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v895(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v895(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v895(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v895(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v895(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v895(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v895(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v895(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v895(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v895(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v895(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v895(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v895(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v895(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v895(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v895(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v895(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v895(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v895(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v895(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v895(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v895(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v895(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v895(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v895(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v895(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v895(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v895(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v895(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v895(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v895(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v895(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v895(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v895(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v895(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v895(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v895(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v895(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v895(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v895(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v895(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v895(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v895(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v895(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v895(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v895(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v895(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v895(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v895(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v895(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v895(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v895(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v895(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v895(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v895(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v895(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v895(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v895(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v895(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v895(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v895(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v895(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v895(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v895(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v895(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v895(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v895(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v895(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v895(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v895(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v895(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v895(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v895(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v895(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v895(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v895(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v895(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v895(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v895(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v895(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v895(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v895(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v895(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v895(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v895(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v895(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v895(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v895(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v895(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.29/77.67  all VarNext (v897(VarNext)-> (all B (range_115_0(B)-> (v895(VarNext,B)<->v219(VarNext,B))))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v897(VarNext)<->v898(VarNext)&v213(VarNext))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v898(VarNext)<->v900(VarNext)&v188(VarNext))).
% 78.29/77.67  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v900(VarNext)<->v207(VarNext))).
% 78.29/77.67  all VarCurr (v180(VarCurr,bitIndex98)<->v182(VarCurr,bitIndex98)).
% 78.29/77.67  all VarCurr (v182(VarCurr,bitIndex98)<->v184(VarCurr,bitIndex98)).
% 78.29/77.67  all VarCurr (v184(VarCurr,bitIndex98)<->v186(VarCurr,bitIndex98)).
% 78.29/77.67  all VarCurr (v780(VarCurr)<->v892(VarCurr)|v782(VarCurr,bitIndex3)).
% 78.29/77.67  all VarCurr (v892(VarCurr)<->v893(VarCurr)|v782(VarCurr,bitIndex2)).
% 78.29/77.67  all VarCurr (v893(VarCurr)<->v782(VarCurr,bitIndex0)|v782(VarCurr,bitIndex1)).
% 78.29/77.67  all VarCurr B (range_3_0(B)-> (v782(VarCurr,B)<->v784(VarCurr,B))).
% 78.29/77.67  all VarCurr B (range_3_0(B)-> (v784(VarCurr,B)<->v801(VarCurr,B))).
% 78.29/77.67  all VarCurr (v801(VarCurr,bitIndex0)<->v886(VarCurr)).
% 78.29/77.67  all VarCurr (v801(VarCurr,bitIndex1)<->v881(VarCurr)).
% 78.29/77.67  all VarCurr (v801(VarCurr,bitIndex2)<->v876(VarCurr)).
% 78.29/77.67  all VarCurr (v801(VarCurr,bitIndex3)<->v871(VarCurr)).
% 78.29/77.67  all VarCurr (v801(VarCurr,bitIndex4)<->v866(VarCurr)).
% 78.29/77.67  all VarCurr (v801(VarCurr,bitIndex5)<->v861(VarCurr)).
% 78.29/77.67  all VarCurr (v801(VarCurr,bitIndex6)<->v856(VarCurr)).
% 78.29/77.67  all VarCurr (v801(VarCurr,bitIndex7)<->v803(VarCurr)).
% 78.29/77.67  all VarCurr (v886(VarCurr)<->v887(VarCurr)&v889(VarCurr)).
% 78.29/77.67  all VarCurr (v889(VarCurr)<->v460(VarCurr,bitIndex0)|v819(VarCurr,bitIndex0)).
% 78.29/77.67  all VarCurr (v887(VarCurr)<->v546(VarCurr)|v888(VarCurr)).
% 78.29/77.67  all VarCurr (-v888(VarCurr)<->v819(VarCurr,bitIndex0)).
% 78.29/77.67  all VarCurr (v881(VarCurr)<->v882(VarCurr)&v885(VarCurr)).
% 78.29/77.67  all VarCurr (v885(VarCurr)<->v818(VarCurr)|v820(VarCurr)).
% 78.29/77.67  all VarCurr (v882(VarCurr)<->v883(VarCurr)|v884(VarCurr)).
% 78.29/77.67  all VarCurr (-v884(VarCurr)<->v820(VarCurr)).
% 78.29/77.67  all VarCurr (-v883(VarCurr)<->v818(VarCurr)).
% 78.29/77.67  all VarCurr (v876(VarCurr)<->v877(VarCurr)&v880(VarCurr)).
% 78.29/77.67  all VarCurr (v880(VarCurr)<->v816(VarCurr)|v825(VarCurr)).
% 78.29/77.67  all VarCurr (v877(VarCurr)<->v878(VarCurr)|v879(VarCurr)).
% 78.29/77.67  all VarCurr (-v879(VarCurr)<->v825(VarCurr)).
% 78.29/77.67  all VarCurr (-v878(VarCurr)<->v816(VarCurr)).
% 78.29/77.67  all VarCurr (v871(VarCurr)<->v872(VarCurr)&v875(VarCurr)).
% 78.29/77.67  all VarCurr (v875(VarCurr)<->v814(VarCurr)|v830(VarCurr)).
% 78.29/77.67  all VarCurr (v872(VarCurr)<->v873(VarCurr)|v874(VarCurr)).
% 78.29/77.67  all VarCurr (-v874(VarCurr)<->v830(VarCurr)).
% 78.29/77.67  all VarCurr (-v873(VarCurr)<->v814(VarCurr)).
% 78.29/77.67  all VarCurr (v866(VarCurr)<->v867(VarCurr)&v870(VarCurr)).
% 78.29/77.67  all VarCurr (v870(VarCurr)<->v812(VarCurr)|v835(VarCurr)).
% 78.29/77.67  all VarCurr (v867(VarCurr)<->v868(VarCurr)|v869(VarCurr)).
% 78.29/77.67  all VarCurr (-v869(VarCurr)<->v835(VarCurr)).
% 78.29/77.67  all VarCurr (-v868(VarCurr)<->v812(VarCurr)).
% 78.29/77.67  all VarCurr (v861(VarCurr)<->v862(VarCurr)&v865(VarCurr)).
% 78.29/77.67  all VarCurr (v865(VarCurr)<->v810(VarCurr)|v840(VarCurr)).
% 78.29/77.67  all VarCurr (v862(VarCurr)<->v863(VarCurr)|v864(VarCurr)).
% 78.29/77.67  all VarCurr (-v864(VarCurr)<->v840(VarCurr)).
% 78.29/77.68  all VarCurr (-v863(VarCurr)<->v810(VarCurr)).
% 78.29/77.68  all VarCurr (v856(VarCurr)<->v857(VarCurr)&v860(VarCurr)).
% 78.29/77.68  all VarCurr (v860(VarCurr)<->v808(VarCurr)|v845(VarCurr)).
% 78.29/77.68  all VarCurr (v857(VarCurr)<->v858(VarCurr)|v859(VarCurr)).
% 78.29/77.68  all VarCurr (-v859(VarCurr)<->v845(VarCurr)).
% 78.29/77.68  all VarCurr (-v858(VarCurr)<->v808(VarCurr)).
% 78.29/77.68  all VarCurr (v803(VarCurr)<->v804(VarCurr)&v855(VarCurr)).
% 78.29/77.68  all VarCurr (v855(VarCurr)<->v806(VarCurr)|v851(VarCurr)).
% 78.29/77.68  all VarCurr (v804(VarCurr)<->v805(VarCurr)|v850(VarCurr)).
% 78.29/77.68  all VarCurr (-v850(VarCurr)<->v851(VarCurr)).
% 78.29/77.68  all VarCurr (v851(VarCurr)<->v852(VarCurr)&v854(VarCurr)).
% 78.29/77.68  all VarCurr (v854(VarCurr)<->v460(VarCurr,bitIndex7)|v819(VarCurr,bitIndex7)).
% 78.29/77.68  all VarCurr (v852(VarCurr)<->v613(VarCurr)|v853(VarCurr)).
% 78.29/77.68  all VarCurr (-v853(VarCurr)<->v819(VarCurr,bitIndex7)).
% 78.29/77.68  all VarCurr (-v805(VarCurr)<->v806(VarCurr)).
% 78.29/77.68  all VarCurr (v806(VarCurr)<->v807(VarCurr)|v849(VarCurr)).
% 78.29/77.68  all VarCurr (v849(VarCurr)<->v460(VarCurr,bitIndex6)&v819(VarCurr,bitIndex6)).
% 78.29/77.68  all VarCurr (v807(VarCurr)<->v808(VarCurr)&v845(VarCurr)).
% 78.29/77.68  all VarCurr (v845(VarCurr)<->v846(VarCurr)&v848(VarCurr)).
% 78.29/77.68  all VarCurr (v848(VarCurr)<->v460(VarCurr,bitIndex6)|v819(VarCurr,bitIndex6)).
% 78.29/77.68  all VarCurr (v846(VarCurr)<->v618(VarCurr)|v847(VarCurr)).
% 78.29/77.68  all VarCurr (-v847(VarCurr)<->v819(VarCurr,bitIndex6)).
% 78.29/77.68  all VarCurr (v808(VarCurr)<->v809(VarCurr)|v844(VarCurr)).
% 78.29/77.68  all VarCurr (v844(VarCurr)<->v460(VarCurr,bitIndex5)&v819(VarCurr,bitIndex5)).
% 78.29/77.68  all VarCurr (v809(VarCurr)<->v810(VarCurr)&v840(VarCurr)).
% 78.29/77.68  all VarCurr (v840(VarCurr)<->v841(VarCurr)&v843(VarCurr)).
% 78.29/77.68  all VarCurr (v843(VarCurr)<->v460(VarCurr,bitIndex5)|v819(VarCurr,bitIndex5)).
% 78.29/77.68  all VarCurr (v841(VarCurr)<->v623(VarCurr)|v842(VarCurr)).
% 78.29/77.68  all VarCurr (-v842(VarCurr)<->v819(VarCurr,bitIndex5)).
% 78.29/77.68  all VarCurr (v810(VarCurr)<->v811(VarCurr)|v839(VarCurr)).
% 78.29/77.68  all VarCurr (v839(VarCurr)<->v460(VarCurr,bitIndex4)&v819(VarCurr,bitIndex4)).
% 78.29/77.68  all VarCurr (v811(VarCurr)<->v812(VarCurr)&v835(VarCurr)).
% 78.29/77.68  all VarCurr (v835(VarCurr)<->v836(VarCurr)&v838(VarCurr)).
% 78.29/77.68  all VarCurr (v838(VarCurr)<->v460(VarCurr,bitIndex4)|v819(VarCurr,bitIndex4)).
% 78.29/77.68  all VarCurr (v836(VarCurr)<->v628(VarCurr)|v837(VarCurr)).
% 78.29/77.68  all VarCurr (-v837(VarCurr)<->v819(VarCurr,bitIndex4)).
% 78.29/77.68  all VarCurr (v812(VarCurr)<->v813(VarCurr)|v834(VarCurr)).
% 78.29/77.68  all VarCurr (v834(VarCurr)<->v460(VarCurr,bitIndex3)&v819(VarCurr,bitIndex3)).
% 78.29/77.68  all VarCurr (v813(VarCurr)<->v814(VarCurr)&v830(VarCurr)).
% 78.29/77.68  all VarCurr (v830(VarCurr)<->v831(VarCurr)&v833(VarCurr)).
% 78.29/77.68  all VarCurr (v833(VarCurr)<->v460(VarCurr,bitIndex3)|v819(VarCurr,bitIndex3)).
% 78.29/77.68  all VarCurr (v831(VarCurr)<->v633(VarCurr)|v832(VarCurr)).
% 78.29/77.68  all VarCurr (-v832(VarCurr)<->v819(VarCurr,bitIndex3)).
% 78.29/77.68  all VarCurr (v814(VarCurr)<->v815(VarCurr)|v829(VarCurr)).
% 78.29/77.68  all VarCurr (v829(VarCurr)<->v460(VarCurr,bitIndex2)&v819(VarCurr,bitIndex2)).
% 78.29/77.68  all VarCurr (v815(VarCurr)<->v816(VarCurr)&v825(VarCurr)).
% 78.29/77.68  all VarCurr (v825(VarCurr)<->v826(VarCurr)&v828(VarCurr)).
% 78.29/77.68  all VarCurr (v828(VarCurr)<->v460(VarCurr,bitIndex2)|v819(VarCurr,bitIndex2)).
% 78.29/77.68  all VarCurr (v826(VarCurr)<->v632(VarCurr)|v827(VarCurr)).
% 78.29/77.68  all VarCurr (-v827(VarCurr)<->v819(VarCurr,bitIndex2)).
% 78.29/77.68  all VarCurr (v816(VarCurr)<->v817(VarCurr)|v824(VarCurr)).
% 78.29/77.68  all VarCurr (v824(VarCurr)<->v460(VarCurr,bitIndex1)&v819(VarCurr,bitIndex1)).
% 78.29/77.68  all VarCurr (v817(VarCurr)<->v818(VarCurr)&v820(VarCurr)).
% 78.29/77.68  all VarCurr (v820(VarCurr)<->v821(VarCurr)&v823(VarCurr)).
% 78.29/77.68  all VarCurr (v823(VarCurr)<->v460(VarCurr,bitIndex1)|v819(VarCurr,bitIndex1)).
% 78.29/77.68  all VarCurr (v821(VarCurr)<->v540(VarCurr)|v822(VarCurr)).
% 78.29/77.68  all VarCurr (-v822(VarCurr)<->v819(VarCurr,bitIndex1)).
% 78.29/77.68  all VarCurr (v818(VarCurr)<->v460(VarCurr,bitIndex0)&v819(VarCurr,bitIndex0)).
% 78.29/77.68  all VarCurr B (range_3_0(B)-> (v819(VarCurr,B)<->v399(VarCurr,B))).
% 78.29/77.68  all VarCurr ((v819(VarCurr,bitIndex7)<->$F)& (v819(VarCurr,bitIndex6)<->$F)& (v819(VarCurr,bitIndex5)<->$F)& (v819(VarCurr,bitIndex4)<->$F)).
% 78.29/77.68  all VarCurr (v460(VarCurr,bitIndex3)<->v462(VarCurr,bitIndex3)).
% 78.29/77.68  all VarCurr (v462(VarCurr,bitIndex3)<->v464(VarCurr,bitIndex3)).
% 78.29/77.68  all VarCurr (v464(VarCurr,bitIndex3)<->v466(VarCurr,bitIndex3)).
% 78.29/77.68  all VarCurr (v466(VarCurr,bitIndex3)<->v42(VarCurr,bitIndex97)).
% 78.29/77.68  all VarCurr (v42(VarCurr,bitIndex97)<->v44(VarCurr,bitIndex97)).
% 78.29/77.68  all VarCurr (v44(VarCurr,bitIndex97)<->v46(VarCurr,bitIndex97)).
% 78.29/77.68  all VarCurr (v46(VarCurr,bitIndex97)<->v48(VarCurr,bitIndex677)).
% 78.29/77.68  all VarNext (v48(VarNext,bitIndex677)<->v794(VarNext,bitIndex97)).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v796(VarNext)-> (v794(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v794(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v794(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v794(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v794(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v794(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v794(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v794(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v794(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v794(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v794(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v794(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v794(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v794(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v794(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v794(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v794(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v794(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v794(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v794(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v794(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v794(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v794(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v794(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v794(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v794(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v794(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v794(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v794(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v794(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v794(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v794(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v794(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v794(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v794(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v794(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v794(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v794(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v794(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v794(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v794(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v794(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v794(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v794(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v794(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v794(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v794(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v794(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v794(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v794(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v794(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v794(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v794(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v794(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v794(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v794(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v794(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v794(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v794(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v794(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v794(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v794(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v794(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v794(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v794(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v794(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v794(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v794(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v794(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v794(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v794(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v794(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v794(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v794(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v794(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v794(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v794(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v794(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v794(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v794(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v794(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v794(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v794(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v794(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v794(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v794(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v794(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v794(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v794(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v794(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v794(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v794(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v794(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v794(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v794(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v794(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v794(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v794(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v794(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v794(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v794(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v794(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v794(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v794(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v794(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v794(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v794(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v794(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v794(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v794(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v794(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v794(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v794(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v794(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v794(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v794(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.29/77.68  all VarNext (v796(VarNext)-> (all B (range_115_0(B)-> (v794(VarNext,B)<->v238(VarNext,B))))).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v796(VarNext)<->v797(VarNext)&v233(VarNext))).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v797(VarNext)<->v799(VarNext)&v188(VarNext))).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v799(VarNext)<->v207(VarNext))).
% 78.29/77.68  all VarNext (v48(VarNext,bitIndex561)<->v786(VarNext,bitIndex97)).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v788(VarNext)-> (v786(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v786(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v786(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v786(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v786(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v786(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v786(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v786(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v786(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v786(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v786(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v786(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v786(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v786(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v786(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v786(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v786(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v786(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v786(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v786(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v786(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v786(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v786(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v786(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v786(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v786(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v786(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v786(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v786(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v786(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v786(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v786(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v786(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v786(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v786(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v786(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v786(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v786(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v786(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v786(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v786(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v786(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v786(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v786(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v786(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v786(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v786(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v786(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v786(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v786(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v786(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v786(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v786(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v786(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v786(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v786(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v786(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v786(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v786(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v786(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v786(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v786(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v786(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v786(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v786(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v786(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v786(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v786(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v786(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v786(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v786(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v786(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v786(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v786(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v786(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v786(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v786(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v786(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v786(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v786(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v786(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v786(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v786(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v786(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v786(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v786(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v786(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v786(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v786(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v786(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v786(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v786(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v786(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v786(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v786(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v786(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v786(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v786(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v786(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v786(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v786(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v786(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v786(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v786(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v786(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v786(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v786(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v786(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v786(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v786(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v786(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v786(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v786(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v786(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v786(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v786(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.29/77.68  all VarNext (v788(VarNext)-> (all B (range_115_0(B)-> (v786(VarNext,B)<->v219(VarNext,B))))).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v788(VarNext)<->v789(VarNext)&v213(VarNext))).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v789(VarNext)<->v791(VarNext)&v188(VarNext))).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v791(VarNext)<->v207(VarNext))).
% 78.29/77.68  all VarCurr (v180(VarCurr,bitIndex97)<->v182(VarCurr,bitIndex97)).
% 78.29/77.68  all VarCurr (v182(VarCurr,bitIndex97)<->v184(VarCurr,bitIndex97)).
% 78.29/77.68  all VarCurr (v184(VarCurr,bitIndex97)<->v186(VarCurr,bitIndex97)).
% 78.29/77.68  all VarCurr (v775(VarCurr,bitIndex1)<->v776(VarCurr,bitIndex1)).
% 78.29/77.68  all VarCurr (v775(VarCurr,bitIndex0)<->v776(VarCurr,bitIndex0)).
% 78.29/77.68  all VarCurr B (range_1_0(B)-> (v776(VarCurr,B)<->$F)).
% 78.29/77.68  all VarCurr ((v776(VarCurr,bitIndex5)<->v777(VarCurr,bitIndex3))& (v776(VarCurr,bitIndex4)<->v777(VarCurr,bitIndex2))& (v776(VarCurr,bitIndex3)<->v777(VarCurr,bitIndex1))& (v776(VarCurr,bitIndex2)<->v777(VarCurr,bitIndex0))).
% 78.29/77.68  all VarCurr (-v24(VarCurr)-> (v772(VarCurr)<->v338(VarCurr))).
% 78.29/77.68  all VarCurr (v24(VarCurr)-> (v772(VarCurr)<->v340(VarCurr))).
% 78.29/77.68  all VarCurr (v452(VarCurr)<->v763(VarCurr)|v448(VarCurr,bitIndex5)).
% 78.29/77.68  all VarCurr (v763(VarCurr)<->v764(VarCurr)|v448(VarCurr,bitIndex4)).
% 78.29/77.68  all VarCurr (v764(VarCurr)<->v765(VarCurr)|v448(VarCurr,bitIndex3)).
% 78.29/77.68  all VarCurr (v765(VarCurr)<->v766(VarCurr)|v448(VarCurr,bitIndex2)).
% 78.29/77.68  all VarCurr (v766(VarCurr)<->v448(VarCurr,bitIndex0)|v448(VarCurr,bitIndex1)).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v669(VarNext)-> (all B (range_5_0(B)-> (v448(VarNext,B)<->v448(VarCurr,B)))))).
% 78.29/77.68  all VarNext (v669(VarNext)-> (all B (range_5_0(B)-> (v448(VarNext,B)<->v686(VarNext,B))))).
% 78.29/77.68  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_5_0(B)-> (v686(VarNext,B)<->v684(VarCurr,B))))).
% 78.29/77.68  all VarCurr (-v678(VarCurr)-> (all B (range_5_0(B)-> (v684(VarCurr,B)<->v687(VarCurr,B))))).
% 78.29/77.68  all VarCurr (v678(VarCurr)-> (all B (range_5_0(B)-> (v684(VarCurr,B)<->$F)))).
% 78.29/77.68  all VarCurr (-v24(VarCurr)-> (all B (range_5_0(B)-> (v687(VarCurr,B)<->v724(VarCurr,B))))).
% 78.29/77.68  all VarCurr (v24(VarCurr)-> (all B (range_5_0(B)-> (v687(VarCurr,B)<->v688(VarCurr,B))))).
% 78.29/77.68  all VarCurr (v724(VarCurr,bitIndex0)<->v757(VarCurr)).
% 78.29/77.68  all VarCurr (v724(VarCurr,bitIndex1)<->v755(VarCurr)).
% 78.29/77.68  all VarCurr (v724(VarCurr,bitIndex2)<->v751(VarCurr)).
% 78.29/77.68  all VarCurr (v724(VarCurr,bitIndex3)<->v747(VarCurr)).
% 78.29/77.68  all VarCurr (v724(VarCurr,bitIndex4)<->v743(VarCurr)).
% 78.29/77.68  all VarCurr (v724(VarCurr,bitIndex5)<->v726(VarCurr)).
% 78.29/77.68  all VarCurr (v755(VarCurr)<->v756(VarCurr)&v758(VarCurr)).
% 78.29/77.68  all VarCurr (v758(VarCurr)<->v448(VarCurr,bitIndex0)|v737(VarCurr)).
% 78.29/77.68  all VarCurr (v756(VarCurr)<->v757(VarCurr)|v448(VarCurr,bitIndex1)).
% 78.29/77.68  all VarCurr (-v757(VarCurr)<->v448(VarCurr,bitIndex0)).
% 78.29/77.68  all VarCurr (v751(VarCurr)<->v752(VarCurr)&v754(VarCurr)).
% 78.29/77.69  all VarCurr (v754(VarCurr)<->v735(VarCurr)|v738(VarCurr)).
% 78.29/77.69  all VarCurr (v752(VarCurr)<->v753(VarCurr)|v448(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (-v753(VarCurr)<->v735(VarCurr)).
% 78.29/77.69  all VarCurr (v747(VarCurr)<->v748(VarCurr)&v750(VarCurr)).
% 78.29/77.69  all VarCurr (v750(VarCurr)<->v733(VarCurr)|v739(VarCurr)).
% 78.29/77.69  all VarCurr (v748(VarCurr)<->v749(VarCurr)|v448(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (-v749(VarCurr)<->v733(VarCurr)).
% 78.29/77.69  all VarCurr (v743(VarCurr)<->v744(VarCurr)&v746(VarCurr)).
% 78.29/77.69  all VarCurr (v746(VarCurr)<->v731(VarCurr)|v740(VarCurr)).
% 78.29/77.69  all VarCurr (v744(VarCurr)<->v745(VarCurr)|v448(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (-v745(VarCurr)<->v731(VarCurr)).
% 78.29/77.69  all VarCurr (v726(VarCurr)<->v727(VarCurr)&v741(VarCurr)).
% 78.29/77.69  all VarCurr (v741(VarCurr)<->v729(VarCurr)|v742(VarCurr)).
% 78.29/77.69  all VarCurr (-v742(VarCurr)<->v448(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (v727(VarCurr)<->v728(VarCurr)|v448(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (-v728(VarCurr)<->v729(VarCurr)).
% 78.29/77.69  all VarCurr (v729(VarCurr)<->v448(VarCurr,bitIndex4)|v730(VarCurr)).
% 78.29/77.69  all VarCurr (v730(VarCurr)<->v731(VarCurr)&v740(VarCurr)).
% 78.29/77.69  all VarCurr (-v740(VarCurr)<->v448(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (v731(VarCurr)<->v448(VarCurr,bitIndex3)|v732(VarCurr)).
% 78.29/77.69  all VarCurr (v732(VarCurr)<->v733(VarCurr)&v739(VarCurr)).
% 78.29/77.69  all VarCurr (-v739(VarCurr)<->v448(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (v733(VarCurr)<->v448(VarCurr,bitIndex2)|v734(VarCurr)).
% 78.29/77.69  all VarCurr (v734(VarCurr)<->v735(VarCurr)&v738(VarCurr)).
% 78.29/77.69  all VarCurr (-v738(VarCurr)<->v448(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (v735(VarCurr)<->v448(VarCurr,bitIndex1)|v736(VarCurr)).
% 78.29/77.69  all VarCurr (v736(VarCurr)<->v448(VarCurr,bitIndex0)&v737(VarCurr)).
% 78.29/77.69  all VarCurr (-v737(VarCurr)<->v448(VarCurr,bitIndex1)).
% 78.29/77.69  all VarCurr (-v450(VarCurr)-> (all B (range_5_0(B)-> (v688(VarCurr,B)<->v454(VarCurr,B))))).
% 78.29/77.69  all VarCurr (v450(VarCurr)-> (all B (range_5_0(B)-> (v688(VarCurr,B)<->v689(VarCurr,B))))).
% 78.29/77.69  all VarCurr (v689(VarCurr,bitIndex0)<->v722(VarCurr)).
% 78.29/77.69  all VarCurr (v689(VarCurr,bitIndex1)<->v720(VarCurr)).
% 78.29/77.69  all VarCurr (v689(VarCurr,bitIndex2)<->v716(VarCurr)).
% 78.29/77.69  all VarCurr (v689(VarCurr,bitIndex3)<->v712(VarCurr)).
% 78.29/77.69  all VarCurr (v689(VarCurr,bitIndex4)<->v708(VarCurr)).
% 78.29/77.69  all VarCurr (v689(VarCurr,bitIndex5)<->v691(VarCurr)).
% 78.29/77.69  all VarCurr (v720(VarCurr)<->v721(VarCurr)&v723(VarCurr)).
% 78.29/77.69  all VarCurr (v723(VarCurr)<->v454(VarCurr,bitIndex0)|v702(VarCurr)).
% 78.29/77.69  all VarCurr (v721(VarCurr)<->v722(VarCurr)|v454(VarCurr,bitIndex1)).
% 78.29/77.69  all VarCurr (-v722(VarCurr)<->v454(VarCurr,bitIndex0)).
% 78.29/77.69  all VarCurr (v716(VarCurr)<->v717(VarCurr)&v719(VarCurr)).
% 78.29/77.69  all VarCurr (v719(VarCurr)<->v700(VarCurr)|v703(VarCurr)).
% 78.29/77.69  all VarCurr (v717(VarCurr)<->v718(VarCurr)|v454(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (-v718(VarCurr)<->v700(VarCurr)).
% 78.29/77.69  all VarCurr (v712(VarCurr)<->v713(VarCurr)&v715(VarCurr)).
% 78.29/77.69  all VarCurr (v715(VarCurr)<->v698(VarCurr)|v704(VarCurr)).
% 78.29/77.69  all VarCurr (v713(VarCurr)<->v714(VarCurr)|v454(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (-v714(VarCurr)<->v698(VarCurr)).
% 78.29/77.69  all VarCurr (v708(VarCurr)<->v709(VarCurr)&v711(VarCurr)).
% 78.29/77.69  all VarCurr (v711(VarCurr)<->v696(VarCurr)|v705(VarCurr)).
% 78.29/77.69  all VarCurr (v709(VarCurr)<->v710(VarCurr)|v454(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (-v710(VarCurr)<->v696(VarCurr)).
% 78.29/77.69  all VarCurr (v691(VarCurr)<->v692(VarCurr)&v706(VarCurr)).
% 78.29/77.69  all VarCurr (v706(VarCurr)<->v694(VarCurr)|v707(VarCurr)).
% 78.29/77.69  all VarCurr (-v707(VarCurr)<->v454(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (v692(VarCurr)<->v693(VarCurr)|v454(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (-v693(VarCurr)<->v694(VarCurr)).
% 78.29/77.69  all VarCurr (v694(VarCurr)<->v454(VarCurr,bitIndex4)|v695(VarCurr)).
% 78.29/77.69  all VarCurr (v695(VarCurr)<->v696(VarCurr)&v705(VarCurr)).
% 78.29/77.69  all VarCurr (-v705(VarCurr)<->v454(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (v696(VarCurr)<->v454(VarCurr,bitIndex3)|v697(VarCurr)).
% 78.29/77.69  all VarCurr (v697(VarCurr)<->v698(VarCurr)&v704(VarCurr)).
% 78.29/77.69  all VarCurr (-v704(VarCurr)<->v454(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (v698(VarCurr)<->v454(VarCurr,bitIndex2)|v699(VarCurr)).
% 78.29/77.69  all VarCurr (v699(VarCurr)<->v700(VarCurr)&v703(VarCurr)).
% 78.29/77.69  all VarCurr (-v703(VarCurr)<->v454(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (v700(VarCurr)<->v454(VarCurr,bitIndex1)|v701(VarCurr)).
% 78.29/77.69  all VarCurr (v701(VarCurr)<->v454(VarCurr,bitIndex0)&v702(VarCurr)).
% 78.29/77.69  all VarCurr (-v702(VarCurr)<->v454(VarCurr,bitIndex1)).
% 78.29/77.69  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v669(VarNext)<->v670(VarNext)&v677(VarNext))).
% 78.29/77.69  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v677(VarNext)<->v675(VarCurr))).
% 78.29/77.69  all VarCurr (v675(VarCurr)<->v678(VarCurr)|v679(VarCurr)).
% 78.29/77.69  all VarCurr (v679(VarCurr)<->v680(VarCurr)&v683(VarCurr)).
% 78.29/77.69  all VarCurr (-v683(VarCurr)<->v678(VarCurr)).
% 78.29/77.69  all VarCurr (v680(VarCurr)<->v24(VarCurr)|v681(VarCurr)).
% 78.29/77.69  all VarCurr (v681(VarCurr)<->v450(VarCurr)&v682(VarCurr)).
% 78.29/77.69  all VarCurr (-v682(VarCurr)<->v24(VarCurr)).
% 78.29/77.69  all VarCurr (-v678(VarCurr)<->v15(VarCurr)).
% 78.29/77.69  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v670(VarNext)<->v671(VarNext)&v355(VarNext))).
% 78.29/77.69  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v671(VarNext)<->v362(VarNext))).
% 78.29/77.69  all B (range_5_0(B)-> (v448(constB0,B)<->$F)).
% 78.29/77.69  all VarCurr (v454(VarCurr,bitIndex0)<->v635(VarCurr,bitIndex0)).
% 78.29/77.69  all VarCurr (-v456(VarCurr)-> (v635(VarCurr,bitIndex5)<->v458(VarCurr,bitIndex7))& (v635(VarCurr,bitIndex4)<->v458(VarCurr,bitIndex6))& (v635(VarCurr,bitIndex3)<->v458(VarCurr,bitIndex5))& (v635(VarCurr,bitIndex2)<->v458(VarCurr,bitIndex4))& (v635(VarCurr,bitIndex1)<->v458(VarCurr,bitIndex3))& (v635(VarCurr,bitIndex0)<->v458(VarCurr,bitIndex2))).
% 78.29/77.69  all VarCurr (v456(VarCurr)-> (all B (range_5_0(B)-> (v635(VarCurr,B)<->v636(VarCurr,B))))).
% 78.29/77.69  all VarCurr (v636(VarCurr,bitIndex0)<->v664(VarCurr)).
% 78.29/77.69  all VarCurr (v636(VarCurr,bitIndex1)<->v662(VarCurr)).
% 78.29/77.69  all VarCurr (v636(VarCurr,bitIndex2)<->v657(VarCurr)).
% 78.29/77.69  all VarCurr (v636(VarCurr,bitIndex3)<->v652(VarCurr)).
% 78.29/77.69  all VarCurr (v636(VarCurr,bitIndex4)<->v647(VarCurr)).
% 78.29/77.69  all VarCurr (v636(VarCurr,bitIndex5)<->v638(VarCurr)).
% 78.29/77.69  all VarCurr (v662(VarCurr)<->v663(VarCurr)&v666(VarCurr)).
% 78.29/77.69  all VarCurr (v666(VarCurr)<->v458(VarCurr,bitIndex2)|v458(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (v663(VarCurr)<->v664(VarCurr)|v665(VarCurr)).
% 78.29/77.69  all VarCurr (-v665(VarCurr)<->v458(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (-v664(VarCurr)<->v458(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (v657(VarCurr)<->v658(VarCurr)&v661(VarCurr)).
% 78.29/77.69  all VarCurr (v661(VarCurr)<->v644(VarCurr)|v458(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (v658(VarCurr)<->v659(VarCurr)|v660(VarCurr)).
% 78.29/77.69  all VarCurr (-v660(VarCurr)<->v458(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (-v659(VarCurr)<->v644(VarCurr)).
% 78.29/77.69  all VarCurr (v652(VarCurr)<->v653(VarCurr)&v656(VarCurr)).
% 78.29/77.69  all VarCurr (v656(VarCurr)<->v643(VarCurr)|v458(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (v653(VarCurr)<->v654(VarCurr)|v655(VarCurr)).
% 78.29/77.69  all VarCurr (-v655(VarCurr)<->v458(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (-v654(VarCurr)<->v643(VarCurr)).
% 78.29/77.69  all VarCurr (v647(VarCurr)<->v648(VarCurr)&v651(VarCurr)).
% 78.29/77.69  all VarCurr (v651(VarCurr)<->v642(VarCurr)|v458(VarCurr,bitIndex6)).
% 78.29/77.69  all VarCurr (v648(VarCurr)<->v649(VarCurr)|v650(VarCurr)).
% 78.29/77.69  all VarCurr (-v650(VarCurr)<->v458(VarCurr,bitIndex6)).
% 78.29/77.69  all VarCurr (-v649(VarCurr)<->v642(VarCurr)).
% 78.29/77.69  all VarCurr (v638(VarCurr)<->v639(VarCurr)&v646(VarCurr)).
% 78.29/77.69  all VarCurr (v646(VarCurr)<->v641(VarCurr)|v458(VarCurr,bitIndex7)).
% 78.29/77.69  all VarCurr (v639(VarCurr)<->v640(VarCurr)|v645(VarCurr)).
% 78.29/77.69  all VarCurr (-v645(VarCurr)<->v458(VarCurr,bitIndex7)).
% 78.29/77.69  all VarCurr (-v640(VarCurr)<->v641(VarCurr)).
% 78.29/77.69  all VarCurr (v641(VarCurr)<->v642(VarCurr)&v458(VarCurr,bitIndex6)).
% 78.29/77.69  all VarCurr (v642(VarCurr)<->v643(VarCurr)&v458(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (v643(VarCurr)<->v644(VarCurr)&v458(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (v644(VarCurr)<->v458(VarCurr,bitIndex2)&v458(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (v458(VarCurr,bitIndex2)<->v603(VarCurr,bitIndex0)).
% 78.29/77.69  all VarCurr (-v552(VarCurr)-> (v603(VarCurr,bitIndex5)<->v460(VarCurr,bitIndex7))& (v603(VarCurr,bitIndex4)<->v460(VarCurr,bitIndex6))& (v603(VarCurr,bitIndex3)<->v460(VarCurr,bitIndex5))& (v603(VarCurr,bitIndex2)<->v460(VarCurr,bitIndex4))& (v603(VarCurr,bitIndex1)<->v460(VarCurr,bitIndex3))& (v603(VarCurr,bitIndex0)<->v460(VarCurr,bitIndex2))).
% 78.29/77.69  all VarCurr (v552(VarCurr)-> (all B (range_5_0(B)-> (v603(VarCurr,B)<->v604(VarCurr,B))))).
% 78.29/77.69  all VarCurr (v604(VarCurr,bitIndex0)<->v632(VarCurr)).
% 78.29/77.69  all VarCurr (v604(VarCurr,bitIndex1)<->v630(VarCurr)).
% 78.29/77.69  all VarCurr (v604(VarCurr,bitIndex2)<->v625(VarCurr)).
% 78.29/77.69  all VarCurr (v604(VarCurr,bitIndex3)<->v620(VarCurr)).
% 78.29/77.69  all VarCurr (v604(VarCurr,bitIndex4)<->v615(VarCurr)).
% 78.29/77.69  all VarCurr (v604(VarCurr,bitIndex5)<->v606(VarCurr)).
% 78.29/77.69  all VarCurr (v630(VarCurr)<->v631(VarCurr)&v634(VarCurr)).
% 78.29/77.69  all VarCurr (v634(VarCurr)<->v460(VarCurr,bitIndex2)|v460(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (v631(VarCurr)<->v632(VarCurr)|v633(VarCurr)).
% 78.29/77.69  all VarCurr (-v633(VarCurr)<->v460(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (-v632(VarCurr)<->v460(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (v625(VarCurr)<->v626(VarCurr)&v629(VarCurr)).
% 78.29/77.69  all VarCurr (v629(VarCurr)<->v612(VarCurr)|v460(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (v626(VarCurr)<->v627(VarCurr)|v628(VarCurr)).
% 78.29/77.69  all VarCurr (-v628(VarCurr)<->v460(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (-v627(VarCurr)<->v612(VarCurr)).
% 78.29/77.69  all VarCurr (v620(VarCurr)<->v621(VarCurr)&v624(VarCurr)).
% 78.29/77.69  all VarCurr (v624(VarCurr)<->v611(VarCurr)|v460(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (v621(VarCurr)<->v622(VarCurr)|v623(VarCurr)).
% 78.29/77.69  all VarCurr (-v623(VarCurr)<->v460(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (-v622(VarCurr)<->v611(VarCurr)).
% 78.29/77.69  all VarCurr (v615(VarCurr)<->v616(VarCurr)&v619(VarCurr)).
% 78.29/77.69  all VarCurr (v619(VarCurr)<->v610(VarCurr)|v460(VarCurr,bitIndex6)).
% 78.29/77.69  all VarCurr (v616(VarCurr)<->v617(VarCurr)|v618(VarCurr)).
% 78.29/77.69  all VarCurr (-v618(VarCurr)<->v460(VarCurr,bitIndex6)).
% 78.29/77.69  all VarCurr (-v617(VarCurr)<->v610(VarCurr)).
% 78.29/77.69  all VarCurr (v606(VarCurr)<->v607(VarCurr)&v614(VarCurr)).
% 78.29/77.69  all VarCurr (v614(VarCurr)<->v609(VarCurr)|v460(VarCurr,bitIndex7)).
% 78.29/77.69  all VarCurr (v607(VarCurr)<->v608(VarCurr)|v613(VarCurr)).
% 78.29/77.69  all VarCurr (-v613(VarCurr)<->v460(VarCurr,bitIndex7)).
% 78.29/77.69  all VarCurr (-v608(VarCurr)<->v609(VarCurr)).
% 78.29/77.69  all VarCurr (v609(VarCurr)<->v610(VarCurr)&v460(VarCurr,bitIndex6)).
% 78.29/77.69  all VarCurr (v610(VarCurr)<->v611(VarCurr)&v460(VarCurr,bitIndex5)).
% 78.29/77.69  all VarCurr (v611(VarCurr)<->v612(VarCurr)&v460(VarCurr,bitIndex4)).
% 78.29/77.69  all VarCurr (v612(VarCurr)<->v460(VarCurr,bitIndex2)&v460(VarCurr,bitIndex3)).
% 78.29/77.69  all VarCurr (v460(VarCurr,bitIndex2)<->v462(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (v462(VarCurr,bitIndex2)<->v464(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (v464(VarCurr,bitIndex2)<->v466(VarCurr,bitIndex2)).
% 78.29/77.69  all VarCurr (v466(VarCurr,bitIndex2)<->v42(VarCurr,bitIndex96)).
% 78.29/77.69  all VarCurr (v42(VarCurr,bitIndex96)<->v44(VarCurr,bitIndex96)).
% 78.29/77.69  all VarCurr (v44(VarCurr,bitIndex96)<->v46(VarCurr,bitIndex96)).
% 78.29/77.69  all VarCurr (v46(VarCurr,bitIndex96)<->v48(VarCurr,bitIndex676)).
% 78.29/77.69  all VarNext (v48(VarNext,bitIndex676)<->v595(VarNext,bitIndex96)).
% 78.29/77.69  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v597(VarNext)-> (v595(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v595(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v595(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v595(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v595(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v595(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v595(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v595(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v595(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v595(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v595(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v595(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v595(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v595(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v595(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v595(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v595(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v595(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v595(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v595(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v595(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v595(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v595(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v595(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v595(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v595(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v595(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v595(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v595(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v595(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v595(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v595(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v595(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v595(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v595(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v595(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v595(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v595(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v595(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v595(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v595(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v595(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v595(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v595(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v595(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v595(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v595(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v595(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v595(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v595(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v595(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v595(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v595(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v595(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v595(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v595(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v595(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v595(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v595(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v595(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v595(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v595(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v595(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v595(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v595(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v595(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v595(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v595(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v595(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v595(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v595(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v595(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v595(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v595(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v595(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v595(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v595(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v595(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v595(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v595(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v595(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v595(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v595(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v595(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v595(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v595(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v595(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v595(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v595(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v595(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v595(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v595(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v595(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v595(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v595(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v595(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v595(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v595(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v595(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v595(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v595(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v595(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v595(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v595(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v595(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v595(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v595(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v595(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v595(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v595(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v595(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v595(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v595(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v595(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v595(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v595(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.29/77.70  all VarNext (v597(VarNext)-> (all B (range_115_0(B)-> (v595(VarNext,B)<->v238(VarNext,B))))).
% 78.29/77.70  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v597(VarNext)<->v598(VarNext)&v233(VarNext))).
% 78.29/77.70  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v598(VarNext)<->v600(VarNext)&v188(VarNext))).
% 78.29/77.70  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v600(VarNext)<->v207(VarNext))).
% 78.29/77.70  all VarNext (v48(VarNext,bitIndex560)<->v587(VarNext,bitIndex96)).
% 78.29/77.70  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v589(VarNext)-> (v587(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v587(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v587(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v587(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v587(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v587(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v587(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v587(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v587(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v587(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v587(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v587(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v587(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v587(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v587(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v587(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v587(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v587(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v587(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v587(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v587(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v587(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v587(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v587(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v587(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v587(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v587(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v587(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v587(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v587(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v587(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v587(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v587(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v587(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v587(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v587(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v587(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v587(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v587(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v587(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v587(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v587(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v587(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v587(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v587(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v587(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v587(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v587(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v587(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v587(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v587(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v587(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v587(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v587(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v587(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v587(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v587(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v587(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v587(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v587(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v587(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v587(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v587(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v587(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v587(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v587(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v587(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v587(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v587(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v587(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v587(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v587(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v587(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v587(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v587(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v587(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v587(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v587(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v587(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v587(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v587(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v587(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v587(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v587(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v587(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v587(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v587(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v587(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v587(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v587(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v587(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v587(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v587(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v587(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v587(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v587(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v587(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v587(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v587(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v587(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v587(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v587(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v587(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v587(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v587(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v587(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v587(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v587(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v587(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v587(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v587(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v587(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v587(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v587(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v587(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v587(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.70  all VarNext (v589(VarNext)-> (all B (range_115_0(B)-> (v587(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.70  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v589(VarNext)<->v590(VarNext)&v213(VarNext))).
% 78.37/77.70  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v590(VarNext)<->v592(VarNext)&v188(VarNext))).
% 78.37/77.70  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v592(VarNext)<->v207(VarNext))).
% 78.37/77.70  all VarCurr (v180(VarCurr,bitIndex96)<->v182(VarCurr,bitIndex96)).
% 78.37/77.70  all VarCurr (v182(VarCurr,bitIndex96)<->v184(VarCurr,bitIndex96)).
% 78.37/77.70  all VarCurr (v184(VarCurr,bitIndex96)<->v186(VarCurr,bitIndex96)).
% 78.37/77.70  all VarCurr (v552(VarCurr)<->v553(VarCurr,bitIndex2)).
% 78.37/77.70  all VarCurr (v553(VarCurr,bitIndex0)<->v581(VarCurr)).
% 78.37/77.70  all VarCurr (v553(VarCurr,bitIndex1)<->v576(VarCurr)).
% 78.37/77.70  all VarCurr (v553(VarCurr,bitIndex2)<->v555(VarCurr)).
% 78.37/77.70  all VarCurr (v581(VarCurr)<->v582(VarCurr)&v585(VarCurr)).
% 78.37/77.70  all VarCurr (v585(VarCurr)<->v561(VarCurr,bitIndex0)|v562(VarCurr,bitIndex0)).
% 78.37/77.70  all VarCurr (v582(VarCurr)<->v583(VarCurr)|v584(VarCurr)).
% 78.37/77.70  all VarCurr (-v584(VarCurr)<->v562(VarCurr,bitIndex0)).
% 78.37/77.70  all VarCurr (-v583(VarCurr)<->v561(VarCurr,bitIndex0)).
% 78.37/77.70  all VarCurr (v576(VarCurr)<->v577(VarCurr)&v580(VarCurr)).
% 78.37/77.70  all VarCurr (v580(VarCurr)<->v560(VarCurr)|v563(VarCurr)).
% 78.37/77.70  all VarCurr (v577(VarCurr)<->v578(VarCurr)|v579(VarCurr)).
% 78.37/77.70  all VarCurr (-v579(VarCurr)<->v563(VarCurr)).
% 78.37/77.70  all VarCurr (-v578(VarCurr)<->v560(VarCurr)).
% 78.37/77.70  all VarCurr (v555(VarCurr)<->v556(VarCurr)&v575(VarCurr)).
% 78.37/77.70  all VarCurr (v575(VarCurr)<->v558(VarCurr)|v570(VarCurr)).
% 78.37/77.70  all VarCurr (v556(VarCurr)<->v557(VarCurr)|v569(VarCurr)).
% 78.37/77.70  all VarCurr (-v569(VarCurr)<->v570(VarCurr)).
% 78.37/77.70  all VarCurr (v570(VarCurr)<->v571(VarCurr)&v574(VarCurr)).
% 78.37/77.70  all VarCurr (v574(VarCurr)<->v561(VarCurr,bitIndex2)|v562(VarCurr,bitIndex2)).
% 78.37/77.70  all VarCurr (v571(VarCurr)<->v572(VarCurr)|v573(VarCurr)).
% 78.37/77.70  all VarCurr (-v573(VarCurr)<->v562(VarCurr,bitIndex2)).
% 78.37/77.70  all VarCurr (-v572(VarCurr)<->v561(VarCurr,bitIndex2)).
% 78.37/77.70  all VarCurr (-v557(VarCurr)<->v558(VarCurr)).
% 78.37/77.70  all VarCurr (v558(VarCurr)<->v559(VarCurr)|v568(VarCurr)).
% 78.37/77.70  all VarCurr (v568(VarCurr)<->v561(VarCurr,bitIndex1)&v562(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v559(VarCurr)<->v560(VarCurr)&v563(VarCurr)).
% 78.37/77.70  all VarCurr (v563(VarCurr)<->v564(VarCurr)&v567(VarCurr)).
% 78.37/77.70  all VarCurr (v567(VarCurr)<->v561(VarCurr,bitIndex1)|v562(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v564(VarCurr)<->v565(VarCurr)|v566(VarCurr)).
% 78.37/77.70  all VarCurr (-v566(VarCurr)<->v562(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (-v565(VarCurr)<->v561(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v560(VarCurr)<->v561(VarCurr,bitIndex0)&v562(VarCurr,bitIndex0)).
% 78.37/77.70  all VarCurr B (range_1_0(B)-> (v562(VarCurr,B)<->v399(VarCurr,B))).
% 78.37/77.70  all VarCurr (v562(VarCurr,bitIndex2)<->$F).
% 78.37/77.70  all VarCurr B (range_1_0(B)-> (v561(VarCurr,B)<->v460(VarCurr,B))).
% 78.37/77.70  all VarCurr (v561(VarCurr,bitIndex2)<->$F).
% 78.37/77.70  all VarCurr (v456(VarCurr)<->v458(VarCurr,bitIndex0)|v458(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v458(VarCurr,bitIndex0)<->v544(VarCurr)).
% 78.37/77.70  all VarCurr (v458(VarCurr,bitIndex1)<->v533(VarCurr)).
% 78.37/77.70  all VarCurr (v544(VarCurr)<->v545(VarCurr)&v548(VarCurr)).
% 78.37/77.70  all VarCurr (v548(VarCurr)<->v460(VarCurr,bitIndex0)|v399(VarCurr,bitIndex0)).
% 78.37/77.70  all VarCurr (v545(VarCurr)<->v546(VarCurr)|v547(VarCurr)).
% 78.37/77.70  all VarCurr (-v547(VarCurr)<->v399(VarCurr,bitIndex0)).
% 78.37/77.70  all VarCurr (-v546(VarCurr)<->v460(VarCurr,bitIndex0)).
% 78.37/77.70  all VarCurr (v533(VarCurr)<->v534(VarCurr)&v543(VarCurr)).
% 78.37/77.70  all VarCurr (v543(VarCurr)<->v536(VarCurr)|v538(VarCurr)).
% 78.37/77.70  all VarCurr (v534(VarCurr)<->v535(VarCurr)|v537(VarCurr)).
% 78.37/77.70  all VarCurr (-v537(VarCurr)<->v538(VarCurr)).
% 78.37/77.70  all VarCurr (v538(VarCurr)<->v539(VarCurr)&v542(VarCurr)).
% 78.37/77.70  all VarCurr (v542(VarCurr)<->v460(VarCurr,bitIndex1)|v399(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v539(VarCurr)<->v540(VarCurr)|v541(VarCurr)).
% 78.37/77.70  all VarCurr (-v541(VarCurr)<->v399(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (-v540(VarCurr)<->v460(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (-v535(VarCurr)<->v536(VarCurr)).
% 78.37/77.70  all VarCurr (v536(VarCurr)<->v460(VarCurr,bitIndex0)&v399(VarCurr,bitIndex0)).
% 78.37/77.70  all VarCurr (v399(VarCurr,bitIndex1)<->v401(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v401(VarCurr,bitIndex1)<->v403(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v403(VarCurr,bitIndex1)<->v40(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v40(VarCurr,bitIndex1)<->v42(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v42(VarCurr,bitIndex1)<->v44(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v44(VarCurr,bitIndex1)<->v46(VarCurr,bitIndex1)).
% 78.37/77.70  all VarCurr (v46(VarCurr,bitIndex1)<->v48(VarCurr,bitIndex581)).
% 78.37/77.70  all VarNext (v48(VarNext,bitIndex581)<->v524(VarNext,bitIndex1)).
% 78.37/77.70  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v526(VarNext)-> (v524(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v524(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v524(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v524(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v524(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v524(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v524(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v524(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v524(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v524(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v524(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v524(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v524(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v524(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v524(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v524(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v524(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v524(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v524(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v524(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v524(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v524(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v524(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v524(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v524(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v524(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v524(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v524(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v524(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v524(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v524(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v524(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v524(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v524(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v524(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v524(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v524(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v524(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v524(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v524(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v524(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v524(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v524(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v524(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v524(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v524(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v524(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v524(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v524(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v524(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v524(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v524(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v524(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v524(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v524(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v524(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v524(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v524(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v524(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v524(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v524(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v524(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v524(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v524(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v524(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v524(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v524(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v524(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v524(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v524(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v524(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v524(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v524(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v524(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v524(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v524(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v524(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v524(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v524(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v524(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v524(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v524(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v524(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v524(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v524(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v524(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v524(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v524(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v524(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v524(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v524(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v524(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v524(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v524(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v524(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v524(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v524(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v524(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v524(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v524(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v524(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v524(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v524(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v524(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v524(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v524(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v524(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v524(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v524(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v524(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v524(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v524(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v524(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v524(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v524(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v524(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.37/77.71  all VarNext (v526(VarNext)-> (all B (range_115_0(B)-> (v524(VarNext,B)<->v238(VarNext,B))))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v526(VarNext)<->v527(VarNext)&v233(VarNext))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v527(VarNext)<->v529(VarNext)&v188(VarNext))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v529(VarNext)<->v207(VarNext))).
% 78.37/77.71  all VarNext (v48(VarNext,bitIndex465)<->v516(VarNext,bitIndex1)).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v518(VarNext)-> (v516(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v516(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v516(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v516(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v516(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v516(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v516(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v516(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v516(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v516(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v516(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v516(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v516(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v516(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v516(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v516(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v516(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v516(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v516(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v516(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v516(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v516(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v516(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v516(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v516(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v516(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v516(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v516(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v516(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v516(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v516(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v516(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v516(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v516(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v516(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v516(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v516(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v516(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v516(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v516(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v516(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v516(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v516(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v516(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v516(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v516(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v516(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v516(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v516(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v516(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v516(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v516(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v516(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v516(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v516(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v516(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v516(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v516(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v516(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v516(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v516(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v516(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v516(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v516(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v516(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v516(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v516(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v516(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v516(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v516(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v516(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v516(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v516(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v516(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v516(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v516(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v516(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v516(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v516(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v516(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v516(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v516(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v516(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v516(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v516(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v516(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v516(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v516(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v516(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v516(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v516(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v516(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v516(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v516(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v516(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v516(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v516(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v516(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v516(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v516(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v516(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v516(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v516(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v516(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v516(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v516(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v516(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v516(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v516(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v516(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v516(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v516(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v516(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v516(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v516(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v516(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.71  all VarNext (v518(VarNext)-> (all B (range_115_0(B)-> (v516(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v518(VarNext)<->v519(VarNext)&v213(VarNext))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v519(VarNext)<->v521(VarNext)&v188(VarNext))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v521(VarNext)<->v207(VarNext))).
% 78.37/77.71  all VarCurr (v180(VarCurr,bitIndex1)<->v182(VarCurr,bitIndex1)).
% 78.37/77.71  all VarCurr (v182(VarCurr,bitIndex1)<->v184(VarCurr,bitIndex1)).
% 78.37/77.71  all VarCurr (v184(VarCurr,bitIndex1)<->v186(VarCurr,bitIndex1)).
% 78.37/77.71  all VarCurr (v399(VarCurr,bitIndex0)<->v401(VarCurr,bitIndex0)).
% 78.37/77.71  all VarCurr (v401(VarCurr,bitIndex0)<->v403(VarCurr,bitIndex0)).
% 78.37/77.71  all VarCurr (v403(VarCurr,bitIndex0)<->v40(VarCurr,bitIndex0)).
% 78.37/77.71  all VarCurr (v40(VarCurr,bitIndex0)<->v42(VarCurr,bitIndex0)).
% 78.37/77.71  all VarCurr (v42(VarCurr,bitIndex0)<->v44(VarCurr,bitIndex0)).
% 78.37/77.71  all VarCurr (v44(VarCurr,bitIndex0)<->v46(VarCurr,bitIndex0)).
% 78.37/77.71  all VarCurr (v46(VarCurr,bitIndex0)<->v48(VarCurr,bitIndex580)).
% 78.37/77.71  all VarNext (v48(VarNext,bitIndex580)<->v508(VarNext,bitIndex0)).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v510(VarNext)-> (v508(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v508(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v508(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v508(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v508(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v508(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v508(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v508(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v508(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v508(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v508(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v508(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v508(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v508(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v508(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v508(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v508(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v508(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v508(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v508(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v508(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v508(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v508(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v508(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v508(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v508(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v508(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v508(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v508(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v508(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v508(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v508(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v508(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v508(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v508(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v508(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v508(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v508(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v508(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v508(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v508(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v508(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v508(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v508(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v508(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v508(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v508(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v508(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v508(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v508(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v508(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v508(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v508(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v508(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v508(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v508(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v508(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v508(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v508(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v508(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v508(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v508(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v508(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v508(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v508(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v508(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v508(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v508(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v508(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v508(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v508(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v508(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v508(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v508(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v508(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v508(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v508(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v508(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v508(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v508(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v508(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v508(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v508(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v508(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v508(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v508(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v508(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v508(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v508(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v508(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v508(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v508(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v508(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v508(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v508(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v508(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v508(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v508(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v508(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v508(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v508(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v508(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v508(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v508(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v508(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v508(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v508(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v508(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v508(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v508(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v508(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v508(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v508(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v508(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v508(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v508(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.37/77.71  all VarNext (v510(VarNext)-> (all B (range_115_0(B)-> (v508(VarNext,B)<->v238(VarNext,B))))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v510(VarNext)<->v511(VarNext)&v233(VarNext))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v511(VarNext)<->v513(VarNext)&v188(VarNext))).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v513(VarNext)<->v207(VarNext))).
% 78.37/77.71  all VarNext (v48(VarNext,bitIndex464)<->v500(VarNext,bitIndex0)).
% 78.37/77.71  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v502(VarNext)-> (v500(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v500(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v500(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v500(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v500(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v500(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v500(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v500(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v500(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v500(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v500(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v500(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v500(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v500(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v500(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v500(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v500(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v500(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v500(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v500(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v500(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v500(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v500(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v500(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v500(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v500(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v500(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v500(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v500(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v500(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v500(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v500(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v500(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v500(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v500(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v500(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v500(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v500(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v500(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v500(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v500(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v500(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v500(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v500(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v500(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v500(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v500(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v500(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v500(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v500(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v500(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v500(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v500(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v500(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v500(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v500(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v500(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v500(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v500(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v500(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v500(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v500(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v500(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v500(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v500(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v500(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v500(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v500(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v500(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v500(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v500(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v500(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v500(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v500(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v500(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v500(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v500(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v500(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v500(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v500(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v500(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v500(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v500(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v500(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v500(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v500(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v500(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v500(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v500(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v500(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v500(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v500(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v500(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v500(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v500(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v500(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v500(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v500(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v500(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v500(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v500(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v500(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v500(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v500(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v500(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v500(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v500(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v500(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v500(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v500(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v500(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v500(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v500(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v500(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v500(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v500(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.72  all VarNext (v502(VarNext)-> (all B (range_115_0(B)-> (v500(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v502(VarNext)<->v503(VarNext)&v213(VarNext))).
% 78.37/77.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v503(VarNext)<->v505(VarNext)&v188(VarNext))).
% 78.37/77.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v505(VarNext)<->v207(VarNext))).
% 78.37/77.72  all VarCurr (v180(VarCurr,bitIndex0)<->v182(VarCurr,bitIndex0)).
% 78.37/77.72  all VarCurr (v182(VarCurr,bitIndex0)<->v184(VarCurr,bitIndex0)).
% 78.37/77.72  all VarCurr (v184(VarCurr,bitIndex0)<->v186(VarCurr,bitIndex0)).
% 78.37/77.72  all VarCurr (v460(VarCurr,bitIndex1)<->v462(VarCurr,bitIndex1)).
% 78.37/77.72  all VarCurr (v462(VarCurr,bitIndex1)<->v464(VarCurr,bitIndex1)).
% 78.37/77.72  all VarCurr (v464(VarCurr,bitIndex1)<->v466(VarCurr,bitIndex1)).
% 78.37/77.72  all VarCurr (v466(VarCurr,bitIndex1)<->v42(VarCurr,bitIndex95)).
% 78.37/77.72  all VarCurr (v42(VarCurr,bitIndex95)<->v44(VarCurr,bitIndex95)).
% 78.37/77.72  all VarCurr (v44(VarCurr,bitIndex95)<->v46(VarCurr,bitIndex95)).
% 78.37/77.72  all VarCurr (v46(VarCurr,bitIndex95)<->v48(VarCurr,bitIndex675)).
% 78.37/77.72  all VarNext (v48(VarNext,bitIndex675)<->v492(VarNext,bitIndex95)).
% 78.37/77.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v494(VarNext)-> (v492(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v492(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v492(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v492(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v492(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v492(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v492(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v492(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v492(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v492(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v492(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v492(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v492(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v492(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v492(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v492(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v492(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v492(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v492(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v492(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v492(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v492(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v492(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v492(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v492(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v492(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v492(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v492(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v492(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v492(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v492(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v492(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v492(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v492(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v492(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v492(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v492(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v492(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v492(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v492(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v492(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v492(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v492(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v492(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v492(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v492(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v492(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v492(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v492(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v492(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v492(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v492(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v492(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v492(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v492(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v492(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v492(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v492(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v492(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v492(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v492(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v492(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v492(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v492(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v492(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v492(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v492(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v492(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v492(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v492(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v492(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v492(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v492(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v492(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v492(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v492(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v492(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v492(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v492(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v492(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v492(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v492(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v492(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v492(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v492(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v492(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v492(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v492(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v492(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v492(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v492(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v492(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v492(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v492(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v492(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v492(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v492(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v492(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v492(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v492(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v492(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v492(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v492(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v492(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v492(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v492(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v492(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v492(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v492(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v492(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v492(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v492(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v492(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v492(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v492(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v492(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.37/77.72  all VarNext (v494(VarNext)-> (all B (range_115_0(B)-> (v492(VarNext,B)<->v238(VarNext,B))))).
% 78.37/77.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v494(VarNext)<->v495(VarNext)&v233(VarNext))).
% 78.37/77.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v495(VarNext)<->v497(VarNext)&v188(VarNext))).
% 78.37/77.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v497(VarNext)<->v207(VarNext))).
% 78.37/77.72  all VarNext (v48(VarNext,bitIndex559)<->v484(VarNext,bitIndex95)).
% 78.37/77.72  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v486(VarNext)-> (v484(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v484(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v484(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v484(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v484(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v484(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v484(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v484(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v484(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v484(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v484(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v484(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v484(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v484(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v484(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v484(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v484(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v484(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v484(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v484(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v484(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v484(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v484(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v484(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v484(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v484(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v484(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v484(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v484(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v484(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v484(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v484(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v484(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v484(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v484(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v484(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v484(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v484(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v484(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v484(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v484(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v484(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v484(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v484(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v484(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v484(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v484(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v484(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v484(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v484(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v484(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v484(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v484(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v484(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v484(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v484(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v484(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v484(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v484(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v484(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v484(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v484(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v484(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v484(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v484(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v484(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v484(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v484(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v484(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v484(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v484(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v484(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v484(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v484(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v484(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v484(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v484(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v484(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v484(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v484(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v484(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v484(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v484(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v484(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v484(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v484(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v484(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v484(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v484(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v484(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v484(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v484(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v484(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v484(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v484(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v484(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v484(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v484(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v484(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v484(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v484(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v484(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v484(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v484(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v484(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v484(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v484(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v484(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v484(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v484(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v484(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v484(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v484(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v484(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v484(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v484(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.73  all VarNext (v486(VarNext)-> (all B (range_115_0(B)-> (v484(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v486(VarNext)<->v487(VarNext)&v213(VarNext))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v487(VarNext)<->v489(VarNext)&v188(VarNext))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v489(VarNext)<->v207(VarNext))).
% 78.37/77.73  all VarCurr (v180(VarCurr,bitIndex95)<->v182(VarCurr,bitIndex95)).
% 78.37/77.73  all VarCurr (v182(VarCurr,bitIndex95)<->v184(VarCurr,bitIndex95)).
% 78.37/77.73  all VarCurr (v184(VarCurr,bitIndex95)<->v186(VarCurr,bitIndex95)).
% 78.37/77.73  all VarCurr (v460(VarCurr,bitIndex0)<->v462(VarCurr,bitIndex0)).
% 78.37/77.73  all VarCurr (v462(VarCurr,bitIndex0)<->v464(VarCurr,bitIndex0)).
% 78.37/77.73  all VarCurr (v464(VarCurr,bitIndex0)<->v466(VarCurr,bitIndex0)).
% 78.37/77.73  all VarCurr (v466(VarCurr,bitIndex0)<->v42(VarCurr,bitIndex94)).
% 78.37/77.73  all VarCurr (v42(VarCurr,bitIndex94)<->v44(VarCurr,bitIndex94)).
% 78.37/77.73  all VarCurr (v44(VarCurr,bitIndex94)<->v46(VarCurr,bitIndex94)).
% 78.37/77.73  all VarCurr (v46(VarCurr,bitIndex94)<->v48(VarCurr,bitIndex674)).
% 78.37/77.73  all VarNext (v48(VarNext,bitIndex674)<->v476(VarNext,bitIndex94)).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v478(VarNext)-> (v476(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v476(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v476(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v476(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v476(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v476(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v476(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v476(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v476(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v476(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v476(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v476(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v476(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v476(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v476(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v476(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v476(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v476(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v476(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v476(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v476(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v476(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v476(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v476(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v476(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v476(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v476(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v476(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v476(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v476(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v476(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v476(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v476(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v476(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v476(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v476(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v476(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v476(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v476(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v476(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v476(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v476(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v476(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v476(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v476(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v476(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v476(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v476(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v476(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v476(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v476(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v476(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v476(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v476(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v476(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v476(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v476(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v476(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v476(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v476(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v476(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v476(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v476(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v476(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v476(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v476(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v476(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v476(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v476(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v476(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v476(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v476(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v476(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v476(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v476(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v476(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v476(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v476(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v476(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v476(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v476(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v476(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v476(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v476(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v476(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v476(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v476(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v476(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v476(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v476(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v476(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v476(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v476(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v476(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v476(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v476(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v476(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v476(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v476(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v476(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v476(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v476(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v476(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v476(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v476(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v476(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v476(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v476(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v476(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v476(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v476(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v476(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v476(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v476(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v476(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v476(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.37/77.73  all VarNext (v478(VarNext)-> (all B (range_115_0(B)-> (v476(VarNext,B)<->v238(VarNext,B))))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v478(VarNext)<->v479(VarNext)&v233(VarNext))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v479(VarNext)<->v481(VarNext)&v188(VarNext))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v481(VarNext)<->v207(VarNext))).
% 78.37/77.73  all VarNext (v48(VarNext,bitIndex558)<->v468(VarNext,bitIndex94)).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v470(VarNext)-> (v468(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v468(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v468(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v468(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v468(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v468(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v468(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v468(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v468(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v468(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v468(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v468(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v468(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v468(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v468(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v468(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v468(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v468(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v468(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v468(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v468(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v468(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v468(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v468(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v468(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v468(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v468(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v468(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v468(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v468(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v468(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v468(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v468(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v468(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v468(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v468(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v468(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v468(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v468(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v468(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v468(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v468(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v468(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v468(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v468(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v468(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v468(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v468(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v468(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v468(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v468(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v468(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v468(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v468(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v468(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v468(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v468(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v468(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v468(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v468(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v468(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v468(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v468(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v468(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v468(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v468(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v468(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v468(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v468(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v468(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v468(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v468(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v468(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v468(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v468(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v468(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v468(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v468(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v468(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v468(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v468(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v468(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v468(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v468(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v468(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v468(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v468(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v468(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v468(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v468(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v468(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v468(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v468(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v468(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v468(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v468(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v468(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v468(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v468(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v468(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v468(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v468(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v468(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v468(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v468(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v468(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v468(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v468(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v468(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v468(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v468(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v468(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v468(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v468(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v468(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v468(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.73  all VarNext (v470(VarNext)-> (all B (range_115_0(B)-> (v468(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v470(VarNext)<->v471(VarNext)&v213(VarNext))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v471(VarNext)<->v473(VarNext)&v188(VarNext))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v473(VarNext)<->v207(VarNext))).
% 78.37/77.73  all VarCurr (v180(VarCurr,bitIndex94)<->v182(VarCurr,bitIndex94)).
% 78.37/77.73  all VarCurr (v182(VarCurr,bitIndex94)<->v184(VarCurr,bitIndex94)).
% 78.37/77.73  all VarCurr (v184(VarCurr,bitIndex94)<->v186(VarCurr,bitIndex94)).
% 78.37/77.73  all VarCurr (v395(VarCurr)<-> (v397(VarCurr,bitIndex3)<->v380(VarCurr,bitIndex1))& (v397(VarCurr,bitIndex2)<->v380(VarCurr,bitIndex0))).
% 78.37/77.73  all B (range_1_0(B)-> (v380(constB0,B)<->$F)).
% 78.37/77.73  all B (range_1_0(B)<->bitIndex0=B|bitIndex1=B).
% 78.37/77.73  all VarCurr B (range_3_2(B)-> (v397(VarCurr,B)<->v443(VarCurr,B))).
% 78.37/77.73  all VarCurr (-v24(VarCurr)-> (all B (range_3_0(B)-> (v443(VarCurr,B)<->v421(VarCurr,B))))).
% 78.37/77.73  all VarCurr (v24(VarCurr)-> (all B (range_3_0(B)-> (v443(VarCurr,B)<->v399(VarCurr,B))))).
% 78.37/77.73  all VarNext B (range_3_2(B)-> (v421(VarNext,B)<->v426(VarNext,B))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v427(VarNext)-> (all B (range_3_0(B)-> (v426(VarNext,B)<->v421(VarCurr,B)))))).
% 78.37/77.73  all VarNext (v427(VarNext)-> (all B (range_3_0(B)-> (v426(VarNext,B)<->v440(VarNext,B))))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_3_0(B)-> (v440(VarNext,B)<->v438(VarCurr,B))))).
% 78.37/77.73  all VarCurr (-v369(VarCurr)-> (all B (range_3_0(B)-> (v438(VarCurr,B)<->v399(VarCurr,B))))).
% 78.37/77.73  all VarCurr (v369(VarCurr)-> (all B (range_3_0(B)-> (v438(VarCurr,B)<->$F)))).
% 78.37/77.73  all B (range_3_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B).
% 78.37/77.73  -b0000(bitIndex3).
% 78.37/77.73  -b0000(bitIndex2).
% 78.37/77.73  -b0000(bitIndex1).
% 78.37/77.73  -b0000(bitIndex0).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v427(VarNext)<->v428(VarNext)&v435(VarNext))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v435(VarNext)<->v433(VarCurr))).
% 78.37/77.73  all VarCurr (v433(VarCurr)<->v369(VarCurr)|v436(VarCurr)).
% 78.37/77.73  all VarCurr (v436(VarCurr)<->v24(VarCurr)&v437(VarCurr)).
% 78.37/77.73  all VarCurr (-v437(VarCurr)<->v369(VarCurr)).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v428(VarNext)<->v430(VarNext)&v355(VarNext))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v430(VarNext)<->v362(VarNext))).
% 78.37/77.73  -v421(constB0,bitIndex3).
% 78.37/77.73  -v421(constB0,bitIndex2).
% 78.37/77.73  -b00xx(bitIndex3).
% 78.37/77.73  -b00xx(bitIndex2).
% 78.37/77.73  all VarCurr B (range_3_2(B)-> (v399(VarCurr,B)<->v401(VarCurr,B))).
% 78.37/77.73  all VarCurr B (range_3_2(B)-> (v401(VarCurr,B)<->v403(VarCurr,B))).
% 78.37/77.73  all VarCurr B (range_3_2(B)-> (v403(VarCurr,B)<->v40(VarCurr,B))).
% 78.37/77.73  all VarCurr B (range_3_2(B)-> (v40(VarCurr,B)<->v42(VarCurr,B))).
% 78.37/77.73  all VarCurr B (range_3_2(B)-> (v42(VarCurr,B)<->v44(VarCurr,B))).
% 78.37/77.73  all VarCurr B (range_3_2(B)-> (v44(VarCurr,B)<->v46(VarCurr,B))).
% 78.37/77.73  all VarCurr ((v46(VarCurr,bitIndex3)<->v48(VarCurr,bitIndex583))& (v46(VarCurr,bitIndex2)<->v48(VarCurr,bitIndex582))).
% 78.37/77.73  all VarNext ((v48(VarNext,bitIndex583)<->v413(VarNext,bitIndex3))& (v48(VarNext,bitIndex582)<->v413(VarNext,bitIndex2))).
% 78.37/77.73  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v415(VarNext)-> (v413(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v413(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v413(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v413(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v413(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v413(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v413(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v413(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v413(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v413(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v413(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v413(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v413(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v413(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v413(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v413(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v413(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v413(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v413(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v413(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v413(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v413(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v413(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v413(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v413(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v413(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v413(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v413(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v413(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v413(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v413(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v413(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v413(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v413(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v413(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v413(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v413(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v413(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v413(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v413(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v413(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v413(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v413(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v413(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v413(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v413(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v413(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v413(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v413(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v413(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v413(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v413(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v413(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v413(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v413(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v413(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v413(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v413(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v413(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v413(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v413(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v413(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v413(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v413(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v413(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v413(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v413(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v413(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v413(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v413(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v413(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v413(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v413(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v413(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v413(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v413(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v413(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v413(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v413(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v413(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v413(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v413(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v413(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v413(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v413(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v413(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v413(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v413(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v413(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v413(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v413(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v413(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v413(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v413(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v413(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v413(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v413(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v413(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v413(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v413(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v413(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v413(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v413(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v413(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v413(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v413(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v413(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v413(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v413(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v413(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v413(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v413(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v413(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v413(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v413(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v413(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.37/77.74  all VarNext (v415(VarNext)-> (all B (range_115_0(B)-> (v413(VarNext,B)<->v238(VarNext,B))))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v415(VarNext)<->v416(VarNext)&v233(VarNext))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v416(VarNext)<->v418(VarNext)&v188(VarNext))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v418(VarNext)<->v207(VarNext))).
% 78.37/77.74  all VarNext ((v48(VarNext,bitIndex467)<->v405(VarNext,bitIndex3))& (v48(VarNext,bitIndex466)<->v405(VarNext,bitIndex2))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v407(VarNext)-> (v405(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v405(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v405(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v405(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v405(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v405(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v405(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v405(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v405(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v405(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v405(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v405(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v405(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v405(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v405(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v405(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v405(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v405(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v405(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v405(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v405(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v405(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v405(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v405(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v405(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v405(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v405(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v405(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v405(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v405(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v405(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v405(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v405(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v405(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v405(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v405(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v405(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v405(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v405(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v405(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v405(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v405(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v405(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v405(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v405(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v405(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v405(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v405(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v405(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v405(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v405(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v405(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v405(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v405(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v405(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v405(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v405(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v405(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v405(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v405(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v405(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v405(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v405(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v405(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v405(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v405(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v405(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v405(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v405(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v405(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v405(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v405(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v405(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v405(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v405(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v405(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v405(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v405(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v405(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v405(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v405(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v405(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v405(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v405(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v405(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v405(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v405(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v405(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v405(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v405(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v405(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v405(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v405(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v405(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v405(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v405(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v405(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v405(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v405(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v405(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v405(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v405(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v405(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v405(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v405(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v405(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v405(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v405(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v405(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v405(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v405(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v405(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v405(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v405(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v405(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v405(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.74  all VarNext (v407(VarNext)-> (all B (range_115_0(B)-> (v405(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v407(VarNext)<->v408(VarNext)&v213(VarNext))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v408(VarNext)<->v410(VarNext)&v188(VarNext))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v410(VarNext)<->v207(VarNext))).
% 78.37/77.74  all VarCurr B (range_3_2(B)-> (v180(VarCurr,B)<->v182(VarCurr,B))).
% 78.37/77.74  all VarCurr B (range_3_2(B)-> (v182(VarCurr,B)<->v184(VarCurr,B))).
% 78.37/77.74  all VarCurr B (range_3_2(B)-> (v184(VarCurr,B)<->v186(VarCurr,B))).
% 78.37/77.74  all B (range_3_2(B)<->bitIndex2=B|bitIndex3=B).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v358(VarNext)-> (v338(VarNext)<->v338(VarCurr)))).
% 78.37/77.74  all VarNext (v358(VarNext)-> (v338(VarNext)<->v374(VarNext))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v374(VarNext)<->v372(VarCurr))).
% 78.37/77.74  all VarCurr (-v369(VarCurr)-> (v372(VarCurr)<->v340(VarCurr))).
% 78.37/77.74  all VarCurr (v369(VarCurr)-> (v372(VarCurr)<->$F)).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v358(VarNext)<->v359(VarNext)&v368(VarNext))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v368(VarNext)<->v366(VarCurr))).
% 78.37/77.74  all VarCurr (v366(VarCurr)<->v369(VarCurr)|v370(VarCurr)).
% 78.37/77.74  all VarCurr (v370(VarCurr)<->v24(VarCurr)&v371(VarCurr)).
% 78.37/77.74  all VarCurr (-v371(VarCurr)<->v369(VarCurr)).
% 78.37/77.74  all VarCurr (-v369(VarCurr)<->v15(VarCurr)).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v359(VarNext)<->v360(VarNext)&v355(VarNext))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v360(VarNext)<->v362(VarNext))).
% 78.37/77.74  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v362(VarNext)<->v355(VarCurr))).
% 78.37/77.74  v338(constB0)<->$F.
% 78.37/77.74  all VarCurr (v355(VarCurr)<->v1(VarCurr)).
% 78.37/77.74  all VarCurr (v340(VarCurr)<->v342(VarCurr)).
% 78.37/77.74  all VarCurr (v342(VarCurr)<->v344(VarCurr)).
% 78.37/77.74  all VarCurr (-v347(VarCurr)-> (v344(VarCurr)<->$F)).
% 78.37/77.74  all VarCurr (v347(VarCurr)-> (v344(VarCurr)<->$T)).
% 78.37/77.74  all VarCurr (v347(VarCurr)<->v349(VarCurr)&v159(VarCurr,bitIndex6)).
% 78.37/77.74  all VarCurr (v349(VarCurr)<->v350(VarCurr)&v311(VarCurr)).
% 78.37/77.74  all VarCurr (v350(VarCurr)<->v351(VarCurr)&v310(VarCurr)).
% 78.37/77.74  all VarCurr (v351(VarCurr)<->v352(VarCurr)&v159(VarCurr,bitIndex3)).
% 78.37/77.74  all VarCurr (v352(VarCurr)<->v353(VarCurr)&v308(VarCurr)).
% 78.37/77.74  all VarCurr (v353(VarCurr)<->v306(VarCurr)&v159(VarCurr,bitIndex1)).
% 78.37/77.74  all VarCurr (v330(VarCurr)<->v17(VarCurr)).
% 78.37/77.74  all VarCurr B (range_4_0(B)-> (v324(VarCurr,B)<->v326(VarCurr,B))).
% 78.37/77.74  all B (range_4_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B).
% 78.37/77.74  all VarCurr (v153(VarCurr)<->v155(VarCurr)).
% 78.37/77.74  all VarCurr (v155(VarCurr)<->v157(VarCurr)).
% 78.37/77.74  all VarCurr (-v296(VarCurr)-> (v157(VarCurr)<->$F)).
% 78.37/77.74  all VarCurr (v296(VarCurr)-> (v157(VarCurr)<->$T)).
% 78.37/77.74  all VarCurr (v296(VarCurr)<->v297(VarCurr)|v312(VarCurr)).
% 78.37/77.74  all VarCurr (v312(VarCurr)<->v313(VarCurr)&v314(VarCurr)).
% 78.37/77.74  all VarCurr (v314(VarCurr)<->v316(VarCurr)&v159(VarCurr,bitIndex6)).
% 78.37/77.74  all VarCurr (v316(VarCurr)<->v302(VarCurr)&v159(VarCurr,bitIndex5)).
% 78.37/77.74  all VarCurr (-v313(VarCurr)<->v244(VarCurr)).
% 78.37/77.74  all VarCurr (v297(VarCurr)<->v298(VarCurr)&v299(VarCurr)).
% 78.37/77.74  all VarCurr (v299(VarCurr)<->v301(VarCurr)&v159(VarCurr,bitIndex6)).
% 78.37/77.74  all VarCurr (v301(VarCurr)<->v302(VarCurr)&v311(VarCurr)).
% 78.37/77.74  all VarCurr (-v311(VarCurr)<->v159(VarCurr,bitIndex5)).
% 78.37/77.74  all VarCurr (v302(VarCurr)<->v303(VarCurr)&v310(VarCurr)).
% 78.37/77.74  all VarCurr (-v310(VarCurr)<->v159(VarCurr,bitIndex4)).
% 78.37/77.74  all VarCurr (v303(VarCurr)<->v304(VarCurr)&v309(VarCurr)).
% 78.37/77.74  all VarCurr (-v309(VarCurr)<->v159(VarCurr,bitIndex3)).
% 78.37/77.74  all VarCurr (v304(VarCurr)<->v305(VarCurr)&v308(VarCurr)).
% 78.37/77.74  all VarCurr (-v308(VarCurr)<->v159(VarCurr,bitIndex2)).
% 78.37/77.74  all VarCurr (v305(VarCurr)<->v306(VarCurr)&v307(VarCurr)).
% 78.37/77.74  all VarCurr (-v307(VarCurr)<->v159(VarCurr,bitIndex1)).
% 78.37/77.74  all VarCurr (-v306(VarCurr)<->v159(VarCurr,bitIndex0)).
% 78.37/77.74  all VarCurr (-v298(VarCurr)<->v38(VarCurr)).
% 78.37/77.74  all VarCurr (-v290(VarCurr)-> (v244(VarCurr)<->v293(VarCurr))).
% 78.37/77.74  all VarCurr (v290(VarCurr)-> (v244(VarCurr)<->v292(VarCurr))).
% 78.37/77.74  all VarCurr (v293(VarCurr)<-> (v40(VarCurr,bitIndex61)<->v270(VarCurr,bitIndex47))& (v40(VarCurr,bitIndex60)<->v270(VarCurr,bitIndex46))& (v40(VarCurr,bitIndex59)<->v270(VarCurr,bitIndex45))& (v40(VarCurr,bitIndex58)<->v270(VarCurr,bitIndex44))& (v40(VarCurr,bitIndex57)<->v270(VarCurr,bitIndex43))& (v40(VarCurr,bitIndex56)<->v270(VarCurr,bitIndex42))& (v40(VarCurr,bitIndex55)<->v270(VarCurr,bitIndex41))& (v40(VarCurr,bitIndex54)<->v270(VarCurr,bitIndex40))& (v40(VarCurr,bitIndex53)<->v270(VarCurr,bitIndex39))& (v40(VarCurr,bitIndex52)<->v270(VarCurr,bitIndex38))& (v40(VarCurr,bitIndex51)<->v270(VarCurr,bitIndex37))& (v40(VarCurr,bitIndex50)<->v270(VarCurr,bitIndex36))& (v40(VarCurr,bitIndex49)<->v270(VarCurr,bitIndex35))& (v40(VarCurr,bitIndex48)<->v270(VarCurr,bitIndex34))& (v40(VarCurr,bitIndex47)<->v270(VarCurr,bitIndex33))& (v40(VarCurr,bitIndex46)<->v270(VarCurr,bitIndex32))& (v40(VarCurr,bitIndex45)<->v270(VarCurr,bitIndex31))& (v40(VarCurr,bitIndex44)<->v270(VarCurr,bitIndex30))& (v40(VarCurr,bitIndex43)<->v270(VarCurr,bitIndex29))& (v40(VarCurr,bitIndex42)<->v270(VarCurr,bitIndex28))& (v40(VarCurr,bitIndex41)<->v270(VarCurr,bitIndex27))& (v40(VarCurr,bitIndex40)<->v270(VarCurr,bitIndex26))& (v40(VarCurr,bitIndex39)<->v270(VarCurr,bitIndex25))& (v40(VarCurr,bitIndex38)<->v270(VarCurr,bitIndex24))& (v40(VarCurr,bitIndex37)<->v270(VarCurr,bitIndex23))& (v40(VarCurr,bitIndex36)<->v270(VarCurr,bitIndex22))& (v40(VarCurr,bitIndex35)<->v270(VarCurr,bitIndex21))& (v40(VarCurr,bitIndex34)<->v270(VarCurr,bitIndex20))& (v40(VarCurr,bitIndex33)<->v270(VarCurr,bitIndex19))& (v40(VarCurr,bitIndex32)<->v270(VarCurr,bitIndex18))& (v40(VarCurr,bitIndex31)<->v270(VarCurr,bitIndex17))& (v40(VarCurr,bitIndex30)<->v270(VarCurr,bitIndex16))& (v40(VarCurr,bitIndex29)<->v270(VarCurr,bitIndex15))& (v40(VarCurr,bitIndex28)<->v270(VarCurr,bitIndex14))& (v40(VarCurr,bitIndex27)<->v270(VarCurr,bitIndex13))& (v40(VarCurr,bitIndex26)<->v270(VarCurr,bitIndex12))& 
(v40(VarCurr,bitIndex25)<->v270(VarCurr,bitIndex11))& (v40(VarCurr,bitIndex24)<->v270(VarCurr,bitIndex10))& (v40(VarCurr,bitIndex23)<->v270(VarCurr,bitIndex9))& (v40(VarCurr,bitIndex22)<->v270(VarCurr,bitIndex8))& (v40(VarCurr,bitIndex21)<->v270(VarCurr,bitIndex7))& (v40(VarCurr,bitIndex20)<->v270(VarCurr,bitIndex6))& (v40(VarCurr,bitIndex19)<->v270(VarCurr,bitIndex5))& (v40(VarCurr,bitIndex18)<->v270(VarCurr,bitIndex4))& (v40(VarCurr,bitIndex17)<->v270(VarCurr,bitIndex3))& (v40(VarCurr,bitIndex16)<->v270(VarCurr,bitIndex2))& (v40(VarCurr,bitIndex15)<->v270(VarCurr,bitIndex1))& (v40(VarCurr,bitIndex14)<->v270(VarCurr,bitIndex0))).
% 78.37/77.74  all VarCurr (v292(VarCurr)<-> (v40(VarCurr,bitIndex60)<->v270(VarCurr,bitIndex46))& (v40(VarCurr,bitIndex59)<->v270(VarCurr,bitIndex45))& (v40(VarCurr,bitIndex58)<->v270(VarCurr,bitIndex44))& (v40(VarCurr,bitIndex57)<->v270(VarCurr,bitIndex43))& (v40(VarCurr,bitIndex56)<->v270(VarCurr,bitIndex42))& (v40(VarCurr,bitIndex55)<->v270(VarCurr,bitIndex41))& (v40(VarCurr,bitIndex54)<->v270(VarCurr,bitIndex40))& (v40(VarCurr,bitIndex53)<->v270(VarCurr,bitIndex39))& (v40(VarCurr,bitIndex52)<->v270(VarCurr,bitIndex38))& (v40(VarCurr,bitIndex51)<->v270(VarCurr,bitIndex37))& (v40(VarCurr,bitIndex50)<->v270(VarCurr,bitIndex36))& (v40(VarCurr,bitIndex49)<->v270(VarCurr,bitIndex35))& (v40(VarCurr,bitIndex48)<->v270(VarCurr,bitIndex34))& (v40(VarCurr,bitIndex47)<->v270(VarCurr,bitIndex33))& (v40(VarCurr,bitIndex46)<->v270(VarCurr,bitIndex32))& (v40(VarCurr,bitIndex45)<->v270(VarCurr,bitIndex31))& (v40(VarCurr,bitIndex44)<->v270(VarCurr,bitIndex30))& (v40(VarCurr,bitIndex43)<->v270(VarCurr,bitIndex29))& (v40(VarCurr,bitIndex42)<->v270(VarCurr,bitIndex28))& (v40(VarCurr,bitIndex41)<->v270(VarCurr,bitIndex27))& (v40(VarCurr,bitIndex40)<->v270(VarCurr,bitIndex26))& (v40(VarCurr,bitIndex39)<->v270(VarCurr,bitIndex25))& (v40(VarCurr,bitIndex38)<->v270(VarCurr,bitIndex24))& (v40(VarCurr,bitIndex37)<->v270(VarCurr,bitIndex23))& (v40(VarCurr,bitIndex36)<->v270(VarCurr,bitIndex22))& (v40(VarCurr,bitIndex35)<->v270(VarCurr,bitIndex21))& (v40(VarCurr,bitIndex34)<->v270(VarCurr,bitIndex20))& (v40(VarCurr,bitIndex33)<->v270(VarCurr,bitIndex19))& (v40(VarCurr,bitIndex32)<->v270(VarCurr,bitIndex18))& (v40(VarCurr,bitIndex31)<->v270(VarCurr,bitIndex17))& (v40(VarCurr,bitIndex30)<->v270(VarCurr,bitIndex16))& (v40(VarCurr,bitIndex29)<->v270(VarCurr,bitIndex15))& (v40(VarCurr,bitIndex28)<->v270(VarCurr,bitIndex14))& (v40(VarCurr,bitIndex27)<->v270(VarCurr,bitIndex13))& (v40(VarCurr,bitIndex26)<->v270(VarCurr,bitIndex12))& (v40(VarCurr,bitIndex25)<->v270(VarCurr,bitIndex11))& 
(v40(VarCurr,bitIndex24)<->v270(VarCurr,bitIndex10))& (v40(VarCurr,bitIndex23)<->v270(VarCurr,bitIndex9))& (v40(VarCurr,bitIndex22)<->v270(VarCurr,bitIndex8))& (v40(VarCurr,bitIndex21)<->v270(VarCurr,bitIndex7))& (v40(VarCurr,bitIndex20)<->v270(VarCurr,bitIndex6))& (v40(VarCurr,bitIndex19)<->v270(VarCurr,bitIndex5))& (v40(VarCurr,bitIndex18)<->v270(VarCurr,bitIndex4))& (v40(VarCurr,bitIndex17)<->v270(VarCurr,bitIndex3))& (v40(VarCurr,bitIndex16)<->v270(VarCurr,bitIndex2))& (v40(VarCurr,bitIndex15)<->v270(VarCurr,bitIndex1))& (v40(VarCurr,bitIndex14)<->v270(VarCurr,bitIndex0))).
% 78.37/77.75  all VarCurr (v290(VarCurr)<->v246(VarCurr)&v291(VarCurr)).
% 78.37/77.75  all VarCurr (-v291(VarCurr)<->v250(VarCurr)).
% 78.37/77.75  all VarCurr (v270(VarCurr,bitIndex47)<->v272(VarCurr,bitIndex47)).
% 78.37/77.75  all VarCurr (v40(VarCurr,bitIndex61)<->v42(VarCurr,bitIndex61)).
% 78.37/77.75  all VarCurr (v42(VarCurr,bitIndex61)<->v44(VarCurr,bitIndex61)).
% 78.37/77.75  all VarCurr (v44(VarCurr,bitIndex61)<->v46(VarCurr,bitIndex61)).
% 78.37/77.75  all VarCurr (v46(VarCurr,bitIndex61)<->v48(VarCurr,bitIndex641)).
% 78.37/77.75  all VarNext (v48(VarNext,bitIndex641)<->v282(VarNext,bitIndex61)).
% 78.37/77.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v284(VarNext)-> (v282(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v282(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v282(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v282(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v282(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v282(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v282(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v282(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v282(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v282(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v282(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v282(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v282(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v282(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v282(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v282(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v282(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v282(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v282(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v282(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v282(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v282(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v282(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v282(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v282(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v282(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v282(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v282(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v282(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v282(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v282(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v282(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v282(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v282(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v282(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v282(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v282(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v282(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v282(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v282(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v282(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v282(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v282(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v282(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v282(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v282(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v282(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v282(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v282(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v282(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v282(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v282(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v282(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v282(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v282(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v282(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v282(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v282(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v282(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v282(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v282(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v282(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v282(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v282(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v282(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v282(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v282(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v282(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v282(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v282(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v282(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v282(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v282(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v282(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v282(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v282(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v282(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v282(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v282(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v282(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v282(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v282(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v282(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v282(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v282(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v282(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v282(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v282(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v282(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v282(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v282(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v282(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v282(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v282(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v282(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v282(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v282(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v282(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v282(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v282(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v282(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v282(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v282(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v282(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v282(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v282(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v282(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v282(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v282(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v282(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v282(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v282(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v282(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v282(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v282(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v282(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.37/77.75  all VarNext (v284(VarNext)-> (all B (range_115_0(B)-> (v282(VarNext,B)<->v238(VarNext,B))))).
% 78.37/77.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v284(VarNext)<->v285(VarNext)&v233(VarNext))).
% 78.37/77.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v285(VarNext)<->v287(VarNext)&v188(VarNext))).
% 78.37/77.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v287(VarNext)<->v207(VarNext))).
% 78.37/77.75  all VarNext (v48(VarNext,bitIndex525)<->v274(VarNext,bitIndex61)).
% 78.37/77.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v276(VarNext)-> (v274(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v274(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v274(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v274(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v274(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v274(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v274(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v274(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v274(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v274(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v274(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v274(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v274(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v274(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v274(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v274(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v274(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v274(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v274(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v274(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v274(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v274(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v274(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v274(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v274(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v274(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v274(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v274(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v274(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v274(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v274(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v274(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v274(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v274(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v274(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v274(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v274(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v274(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v274(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v274(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v274(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v274(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v274(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v274(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v274(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v274(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v274(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v274(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v274(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v274(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v274(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v274(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v274(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v274(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v274(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v274(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v274(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v274(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v274(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v274(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v274(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v274(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v274(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v274(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v274(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v274(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v274(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v274(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v274(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v274(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v274(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v274(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v274(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v274(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v274(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v274(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v274(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v274(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v274(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v274(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v274(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v274(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v274(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v274(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v274(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v274(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v274(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v274(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v274(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v274(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v274(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v274(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v274(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v274(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v274(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v274(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v274(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v274(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v274(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v274(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v274(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v274(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v274(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v274(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v274(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v274(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v274(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v274(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v274(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v274(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v274(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v274(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v274(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v274(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v274(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v274(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.75  all VarNext (v276(VarNext)-> (all B (range_115_0(B)-> (v274(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v276(VarNext)<->v277(VarNext)&v213(VarNext))).
% 78.37/77.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v277(VarNext)<->v279(VarNext)&v188(VarNext))).
% 78.37/77.75  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v279(VarNext)<->v207(VarNext))).
% 78.37/77.75  all VarCurr (v180(VarCurr,bitIndex61)<->v182(VarCurr,bitIndex61)).
% 78.37/77.75  all VarCurr (v182(VarCurr,bitIndex61)<->v184(VarCurr,bitIndex61)).
% 78.37/77.75  all VarCurr (v184(VarCurr,bitIndex61)<->v186(VarCurr,bitIndex61)).
% 78.37/77.75  all VarCurr B (range_46_0(B)-> (v270(VarCurr,B)<->v272(VarCurr,B))).
% 78.37/77.75  all B (range_46_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B).
% 78.37/77.75  all VarCurr B (range_60_14(B)-> (v40(VarCurr,B)<->v42(VarCurr,B))).
% 78.37/77.75  all B (range_60_14(B)<->bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B|bitIndex56=B|bitIndex57=B|bitIndex58=B|bitIndex59=B|bitIndex60=B).
% 78.37/77.75  all VarCurr B (range_60_30(B)-> (v42(VarCurr,B)<->v44(VarCurr,B))).
% 78.37/77.75  all VarCurr B (range_60_30(B)-> (v44(VarCurr,B)<->v46(VarCurr,B))).
% 78.37/77.75  all VarCurr ((v46(VarCurr,bitIndex60)<->v48(VarCurr,bitIndex640))& (v46(VarCurr,bitIndex59)<->v48(VarCurr,bitIndex639))& (v46(VarCurr,bitIndex58)<->v48(VarCurr,bitIndex638))& (v46(VarCurr,bitIndex57)<->v48(VarCurr,bitIndex637))& (v46(VarCurr,bitIndex56)<->v48(VarCurr,bitIndex636))& (v46(VarCurr,bitIndex55)<->v48(VarCurr,bitIndex635))& (v46(VarCurr,bitIndex54)<->v48(VarCurr,bitIndex634))& (v46(VarCurr,bitIndex53)<->v48(VarCurr,bitIndex633))& (v46(VarCurr,bitIndex52)<->v48(VarCurr,bitIndex632))& (v46(VarCurr,bitIndex51)<->v48(VarCurr,bitIndex631))& (v46(VarCurr,bitIndex50)<->v48(VarCurr,bitIndex630))& (v46(VarCurr,bitIndex49)<->v48(VarCurr,bitIndex629))& (v46(VarCurr,bitIndex48)<->v48(VarCurr,bitIndex628))& (v46(VarCurr,bitIndex47)<->v48(VarCurr,bitIndex627))& (v46(VarCurr,bitIndex46)<->v48(VarCurr,bitIndex626))& (v46(VarCurr,bitIndex45)<->v48(VarCurr,bitIndex625))& (v46(VarCurr,bitIndex44)<->v48(VarCurr,bitIndex624))& (v46(VarCurr,bitIndex43)<->v48(VarCurr,bitIndex623))& (v46(VarCurr,bitIndex42)<->v48(VarCurr,bitIndex622))& (v46(VarCurr,bitIndex41)<->v48(VarCurr,bitIndex621))& (v46(VarCurr,bitIndex40)<->v48(VarCurr,bitIndex620))& (v46(VarCurr,bitIndex39)<->v48(VarCurr,bitIndex619))& (v46(VarCurr,bitIndex38)<->v48(VarCurr,bitIndex618))& (v46(VarCurr,bitIndex37)<->v48(VarCurr,bitIndex617))& (v46(VarCurr,bitIndex36)<->v48(VarCurr,bitIndex616))& (v46(VarCurr,bitIndex35)<->v48(VarCurr,bitIndex615))& (v46(VarCurr,bitIndex34)<->v48(VarCurr,bitIndex614))& (v46(VarCurr,bitIndex33)<->v48(VarCurr,bitIndex613))& (v46(VarCurr,bitIndex32)<->v48(VarCurr,bitIndex612))& (v46(VarCurr,bitIndex31)<->v48(VarCurr,bitIndex611))& (v46(VarCurr,bitIndex30)<->v48(VarCurr,bitIndex610))).
% 78.37/77.76  all VarNext ((v48(VarNext,bitIndex640)<->v262(VarNext,bitIndex60))& (v48(VarNext,bitIndex639)<->v262(VarNext,bitIndex59))& (v48(VarNext,bitIndex638)<->v262(VarNext,bitIndex58))& (v48(VarNext,bitIndex637)<->v262(VarNext,bitIndex57))& (v48(VarNext,bitIndex636)<->v262(VarNext,bitIndex56))& (v48(VarNext,bitIndex635)<->v262(VarNext,bitIndex55))& (v48(VarNext,bitIndex634)<->v262(VarNext,bitIndex54))& (v48(VarNext,bitIndex633)<->v262(VarNext,bitIndex53))& (v48(VarNext,bitIndex632)<->v262(VarNext,bitIndex52))& (v48(VarNext,bitIndex631)<->v262(VarNext,bitIndex51))& (v48(VarNext,bitIndex630)<->v262(VarNext,bitIndex50))& (v48(VarNext,bitIndex629)<->v262(VarNext,bitIndex49))& (v48(VarNext,bitIndex628)<->v262(VarNext,bitIndex48))& (v48(VarNext,bitIndex627)<->v262(VarNext,bitIndex47))& (v48(VarNext,bitIndex626)<->v262(VarNext,bitIndex46))& (v48(VarNext,bitIndex625)<->v262(VarNext,bitIndex45))& (v48(VarNext,bitIndex624)<->v262(VarNext,bitIndex44))& (v48(VarNext,bitIndex623)<->v262(VarNext,bitIndex43))& (v48(VarNext,bitIndex622)<->v262(VarNext,bitIndex42))& (v48(VarNext,bitIndex621)<->v262(VarNext,bitIndex41))& (v48(VarNext,bitIndex620)<->v262(VarNext,bitIndex40))& (v48(VarNext,bitIndex619)<->v262(VarNext,bitIndex39))& (v48(VarNext,bitIndex618)<->v262(VarNext,bitIndex38))& (v48(VarNext,bitIndex617)<->v262(VarNext,bitIndex37))& (v48(VarNext,bitIndex616)<->v262(VarNext,bitIndex36))& (v48(VarNext,bitIndex615)<->v262(VarNext,bitIndex35))& (v48(VarNext,bitIndex614)<->v262(VarNext,bitIndex34))& (v48(VarNext,bitIndex613)<->v262(VarNext,bitIndex33))& (v48(VarNext,bitIndex612)<->v262(VarNext,bitIndex32))& (v48(VarNext,bitIndex611)<->v262(VarNext,bitIndex31))& (v48(VarNext,bitIndex610)<->v262(VarNext,bitIndex30))).
% 78.37/77.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v264(VarNext)-> (v262(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v262(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v262(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v262(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v262(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v262(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v262(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v262(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v262(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v262(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v262(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v262(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v262(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v262(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v262(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v262(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v262(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v262(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v262(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v262(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v262(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v262(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v262(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v262(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v262(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v262(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v262(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v262(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v262(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v262(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v262(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v262(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v262(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v262(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v262(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v262(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v262(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v262(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v262(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v262(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v262(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v262(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v262(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v262(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v262(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v262(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v262(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v262(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v262(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v262(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v262(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v262(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v262(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v262(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v262(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v262(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v262(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v262(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v262(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v262(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v262(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v262(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v262(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v262(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v262(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v262(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v262(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v262(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v262(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v262(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v262(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v262(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v262(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v262(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v262(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v262(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v262(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v262(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v262(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v262(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v262(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v262(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v262(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v262(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v262(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v262(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v262(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v262(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v262(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v262(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v262(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v262(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v262(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v262(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v262(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v262(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v262(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v262(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v262(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v262(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v262(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v262(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v262(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v262(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v262(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v262(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v262(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v262(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v262(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v262(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v262(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v262(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v262(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v262(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v262(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v262(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.37/77.76  all VarNext (v264(VarNext)-> (all B (range_115_0(B)-> (v262(VarNext,B)<->v238(VarNext,B))))).
% 78.37/77.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v264(VarNext)<->v265(VarNext)&v233(VarNext))).
% 78.37/77.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v265(VarNext)<->v267(VarNext)&v188(VarNext))).
% 78.37/77.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v267(VarNext)<->v207(VarNext))).
% 78.37/77.76  all VarNext ((v48(VarNext,bitIndex524)<->v254(VarNext,bitIndex60))& (v48(VarNext,bitIndex523)<->v254(VarNext,bitIndex59))& (v48(VarNext,bitIndex522)<->v254(VarNext,bitIndex58))& (v48(VarNext,bitIndex521)<->v254(VarNext,bitIndex57))& (v48(VarNext,bitIndex520)<->v254(VarNext,bitIndex56))& (v48(VarNext,bitIndex519)<->v254(VarNext,bitIndex55))& (v48(VarNext,bitIndex518)<->v254(VarNext,bitIndex54))& (v48(VarNext,bitIndex517)<->v254(VarNext,bitIndex53))& (v48(VarNext,bitIndex516)<->v254(VarNext,bitIndex52))& (v48(VarNext,bitIndex515)<->v254(VarNext,bitIndex51))& (v48(VarNext,bitIndex514)<->v254(VarNext,bitIndex50))& (v48(VarNext,bitIndex513)<->v254(VarNext,bitIndex49))& (v48(VarNext,bitIndex512)<->v254(VarNext,bitIndex48))& (v48(VarNext,bitIndex511)<->v254(VarNext,bitIndex47))& (v48(VarNext,bitIndex510)<->v254(VarNext,bitIndex46))& (v48(VarNext,bitIndex509)<->v254(VarNext,bitIndex45))& (v48(VarNext,bitIndex508)<->v254(VarNext,bitIndex44))& (v48(VarNext,bitIndex507)<->v254(VarNext,bitIndex43))& (v48(VarNext,bitIndex506)<->v254(VarNext,bitIndex42))& (v48(VarNext,bitIndex505)<->v254(VarNext,bitIndex41))& (v48(VarNext,bitIndex504)<->v254(VarNext,bitIndex40))& (v48(VarNext,bitIndex503)<->v254(VarNext,bitIndex39))& (v48(VarNext,bitIndex502)<->v254(VarNext,bitIndex38))& (v48(VarNext,bitIndex501)<->v254(VarNext,bitIndex37))& (v48(VarNext,bitIndex500)<->v254(VarNext,bitIndex36))& (v48(VarNext,bitIndex499)<->v254(VarNext,bitIndex35))& (v48(VarNext,bitIndex498)<->v254(VarNext,bitIndex34))& (v48(VarNext,bitIndex497)<->v254(VarNext,bitIndex33))& (v48(VarNext,bitIndex496)<->v254(VarNext,bitIndex32))& (v48(VarNext,bitIndex495)<->v254(VarNext,bitIndex31))& (v48(VarNext,bitIndex494)<->v254(VarNext,bitIndex30))).
% 78.37/77.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v256(VarNext)-> (v254(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v254(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v254(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v254(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v254(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v254(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v254(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v254(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v254(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v254(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v254(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v254(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v254(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v254(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v254(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v254(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v254(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v254(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v254(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v254(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v254(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v254(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v254(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v254(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v254(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v254(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v254(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v254(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v254(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v254(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v254(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v254(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v254(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v254(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v254(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v254(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v254(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v254(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v254(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v254(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v254(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v254(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v254(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v254(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v254(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v254(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v254(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v254(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v254(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v254(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v254(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v254(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v254(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v254(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v254(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v254(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v254(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v254(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v254(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v254(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v254(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v254(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v254(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v254(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v254(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v254(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v254(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v254(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v254(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v254(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v254(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v254(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v254(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v254(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v254(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v254(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v254(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v254(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v254(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v254(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v254(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v254(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v254(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v254(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v254(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v254(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v254(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v254(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v254(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v254(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v254(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v254(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v254(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v254(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v254(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v254(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v254(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v254(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v254(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v254(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v254(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v254(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v254(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v254(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v254(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v254(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v254(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v254(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v254(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v254(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v254(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v254(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v254(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v254(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v254(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v254(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.76  all VarNext (v256(VarNext)-> (all B (range_115_0(B)-> (v254(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v256(VarNext)<->v257(VarNext)&v213(VarNext))).
% 78.37/77.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v257(VarNext)<->v259(VarNext)&v188(VarNext))).
% 78.37/77.76  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v259(VarNext)<->v207(VarNext))).
% 78.37/77.76  all VarCurr B (range_60_30(B)-> (v180(VarCurr,B)<->v182(VarCurr,B))).
% 78.37/77.76  all VarCurr B (range_60_30(B)-> (v182(VarCurr,B)<->v184(VarCurr,B))).
% 78.37/77.76  all VarCurr B (range_60_30(B)-> (v184(VarCurr,B)<->v186(VarCurr,B))).
% 78.37/77.76  all B (range_60_30(B)<->bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B|bitIndex56=B|bitIndex57=B|bitIndex58=B|bitIndex59=B|bitIndex60=B).
% 78.37/77.76  all VarCurr (v250(VarCurr)<->v252(VarCurr)).
% 78.37/77.76  all VarCurr (v246(VarCurr)<->v248(VarCurr)).
% 78.37/77.76  all VarCurr ((v159(VarCurr,bitIndex6)<->v42(VarCurr,bitIndex115))& (v159(VarCurr,bitIndex5)<->v42(VarCurr,bitIndex114))& (v159(VarCurr,bitIndex4)<->v42(VarCurr,bitIndex113))& (v159(VarCurr,bitIndex3)<->v42(VarCurr,bitIndex112))& (v159(VarCurr,bitIndex2)<->v42(VarCurr,bitIndex111))& (v159(VarCurr,bitIndex1)<->v42(VarCurr,bitIndex110))& (v159(VarCurr,bitIndex0)<->v42(VarCurr,bitIndex109))).
% 78.37/77.76  all VarCurr B (range_115_109(B)-> (v42(VarCurr,B)<->v44(VarCurr,B))).
% 78.37/77.76  all VarCurr B (range_115_109(B)-> (v44(VarCurr,B)<->v46(VarCurr,B))).
% 78.37/77.76  all VarCurr ((v46(VarCurr,bitIndex115)<->v48(VarCurr,bitIndex695))& (v46(VarCurr,bitIndex114)<->v48(VarCurr,bitIndex694))& (v46(VarCurr,bitIndex113)<->v48(VarCurr,bitIndex693))& (v46(VarCurr,bitIndex112)<->v48(VarCurr,bitIndex692))& (v46(VarCurr,bitIndex111)<->v48(VarCurr,bitIndex691))& (v46(VarCurr,bitIndex110)<->v48(VarCurr,bitIndex690))& (v46(VarCurr,bitIndex109)<->v48(VarCurr,bitIndex689))).
% 78.37/77.76  all VarNext ((v48(VarNext,bitIndex695)<->v224(VarNext,bitIndex115))& (v48(VarNext,bitIndex694)<->v224(VarNext,bitIndex114))& (v48(VarNext,bitIndex693)<->v224(VarNext,bitIndex113))& (v48(VarNext,bitIndex692)<->v224(VarNext,bitIndex112))& (v48(VarNext,bitIndex691)<->v224(VarNext,bitIndex111))& (v48(VarNext,bitIndex690)<->v224(VarNext,bitIndex110))& (v48(VarNext,bitIndex689)<->v224(VarNext,bitIndex109))).
% 78.37/77.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v225(VarNext)-> (v224(VarNext,bitIndex115)<->v48(VarCurr,bitIndex695))& (v224(VarNext,bitIndex114)<->v48(VarCurr,bitIndex694))& (v224(VarNext,bitIndex113)<->v48(VarCurr,bitIndex693))& (v224(VarNext,bitIndex112)<->v48(VarCurr,bitIndex692))& (v224(VarNext,bitIndex111)<->v48(VarCurr,bitIndex691))& (v224(VarNext,bitIndex110)<->v48(VarCurr,bitIndex690))& (v224(VarNext,bitIndex109)<->v48(VarCurr,bitIndex689))& (v224(VarNext,bitIndex108)<->v48(VarCurr,bitIndex688))& (v224(VarNext,bitIndex107)<->v48(VarCurr,bitIndex687))& (v224(VarNext,bitIndex106)<->v48(VarCurr,bitIndex686))& (v224(VarNext,bitIndex105)<->v48(VarCurr,bitIndex685))& (v224(VarNext,bitIndex104)<->v48(VarCurr,bitIndex684))& (v224(VarNext,bitIndex103)<->v48(VarCurr,bitIndex683))& (v224(VarNext,bitIndex102)<->v48(VarCurr,bitIndex682))& (v224(VarNext,bitIndex101)<->v48(VarCurr,bitIndex681))& (v224(VarNext,bitIndex100)<->v48(VarCurr,bitIndex680))& (v224(VarNext,bitIndex99)<->v48(VarCurr,bitIndex679))& (v224(VarNext,bitIndex98)<->v48(VarCurr,bitIndex678))& (v224(VarNext,bitIndex97)<->v48(VarCurr,bitIndex677))& (v224(VarNext,bitIndex96)<->v48(VarCurr,bitIndex676))& (v224(VarNext,bitIndex95)<->v48(VarCurr,bitIndex675))& (v224(VarNext,bitIndex94)<->v48(VarCurr,bitIndex674))& (v224(VarNext,bitIndex93)<->v48(VarCurr,bitIndex673))& (v224(VarNext,bitIndex92)<->v48(VarCurr,bitIndex672))& (v224(VarNext,bitIndex91)<->v48(VarCurr,bitIndex671))& (v224(VarNext,bitIndex90)<->v48(VarCurr,bitIndex670))& (v224(VarNext,bitIndex89)<->v48(VarCurr,bitIndex669))& (v224(VarNext,bitIndex88)<->v48(VarCurr,bitIndex668))& (v224(VarNext,bitIndex87)<->v48(VarCurr,bitIndex667))& (v224(VarNext,bitIndex86)<->v48(VarCurr,bitIndex666))& (v224(VarNext,bitIndex85)<->v48(VarCurr,bitIndex665))& (v224(VarNext,bitIndex84)<->v48(VarCurr,bitIndex664))& (v224(VarNext,bitIndex83)<->v48(VarCurr,bitIndex663))& (v224(VarNext,bitIndex82)<->v48(VarCurr,bitIndex662))& 
(v224(VarNext,bitIndex81)<->v48(VarCurr,bitIndex661))& (v224(VarNext,bitIndex80)<->v48(VarCurr,bitIndex660))& (v224(VarNext,bitIndex79)<->v48(VarCurr,bitIndex659))& (v224(VarNext,bitIndex78)<->v48(VarCurr,bitIndex658))& (v224(VarNext,bitIndex77)<->v48(VarCurr,bitIndex657))& (v224(VarNext,bitIndex76)<->v48(VarCurr,bitIndex656))& (v224(VarNext,bitIndex75)<->v48(VarCurr,bitIndex655))& (v224(VarNext,bitIndex74)<->v48(VarCurr,bitIndex654))& (v224(VarNext,bitIndex73)<->v48(VarCurr,bitIndex653))& (v224(VarNext,bitIndex72)<->v48(VarCurr,bitIndex652))& (v224(VarNext,bitIndex71)<->v48(VarCurr,bitIndex651))& (v224(VarNext,bitIndex70)<->v48(VarCurr,bitIndex650))& (v224(VarNext,bitIndex69)<->v48(VarCurr,bitIndex649))& (v224(VarNext,bitIndex68)<->v48(VarCurr,bitIndex648))& (v224(VarNext,bitIndex67)<->v48(VarCurr,bitIndex647))& (v224(VarNext,bitIndex66)<->v48(VarCurr,bitIndex646))& (v224(VarNext,bitIndex65)<->v48(VarCurr,bitIndex645))& (v224(VarNext,bitIndex64)<->v48(VarCurr,bitIndex644))& (v224(VarNext,bitIndex63)<->v48(VarCurr,bitIndex643))& (v224(VarNext,bitIndex62)<->v48(VarCurr,bitIndex642))& (v224(VarNext,bitIndex61)<->v48(VarCurr,bitIndex641))& (v224(VarNext,bitIndex60)<->v48(VarCurr,bitIndex640))& (v224(VarNext,bitIndex59)<->v48(VarCurr,bitIndex639))& (v224(VarNext,bitIndex58)<->v48(VarCurr,bitIndex638))& (v224(VarNext,bitIndex57)<->v48(VarCurr,bitIndex637))& (v224(VarNext,bitIndex56)<->v48(VarCurr,bitIndex636))& (v224(VarNext,bitIndex55)<->v48(VarCurr,bitIndex635))& (v224(VarNext,bitIndex54)<->v48(VarCurr,bitIndex634))& (v224(VarNext,bitIndex53)<->v48(VarCurr,bitIndex633))& (v224(VarNext,bitIndex52)<->v48(VarCurr,bitIndex632))& (v224(VarNext,bitIndex51)<->v48(VarCurr,bitIndex631))& (v224(VarNext,bitIndex50)<->v48(VarCurr,bitIndex630))& (v224(VarNext,bitIndex49)<->v48(VarCurr,bitIndex629))& (v224(VarNext,bitIndex48)<->v48(VarCurr,bitIndex628))& (v224(VarNext,bitIndex47)<->v48(VarCurr,bitIndex627))& (v224(VarNext,bitIndex46)<->v48(VarCurr,bitIndex626))& 
(v224(VarNext,bitIndex45)<->v48(VarCurr,bitIndex625))& (v224(VarNext,bitIndex44)<->v48(VarCurr,bitIndex624))& (v224(VarNext,bitIndex43)<->v48(VarCurr,bitIndex623))& (v224(VarNext,bitIndex42)<->v48(VarCurr,bitIndex622))& (v224(VarNext,bitIndex41)<->v48(VarCurr,bitIndex621))& (v224(VarNext,bitIndex40)<->v48(VarCurr,bitIndex620))& (v224(VarNext,bitIndex39)<->v48(VarCurr,bitIndex619))& (v224(VarNext,bitIndex38)<->v48(VarCurr,bitIndex618))& (v224(VarNext,bitIndex37)<->v48(VarCurr,bitIndex617))& (v224(VarNext,bitIndex36)<->v48(VarCurr,bitIndex616))& (v224(VarNext,bitIndex35)<->v48(VarCurr,bitIndex615))& (v224(VarNext,bitIndex34)<->v48(VarCurr,bitIndex614))& (v224(VarNext,bitIndex33)<->v48(VarCurr,bitIndex613))& (v224(VarNext,bitIndex32)<->v48(VarCurr,bitIndex612))& (v224(VarNext,bitIndex31)<->v48(VarCurr,bitIndex611))& (v224(VarNext,bitIndex30)<->v48(VarCurr,bitIndex610))& (v224(VarNext,bitIndex29)<->v48(VarCurr,bitIndex609))& (v224(VarNext,bitIndex28)<->v48(VarCurr,bitIndex608))& (v224(VarNext,bitIndex27)<->v48(VarCurr,bitIndex607))& (v224(VarNext,bitIndex26)<->v48(VarCurr,bitIndex606))& (v224(VarNext,bitIndex25)<->v48(VarCurr,bitIndex605))& (v224(VarNext,bitIndex24)<->v48(VarCurr,bitIndex604))& (v224(VarNext,bitIndex23)<->v48(VarCurr,bitIndex603))& (v224(VarNext,bitIndex22)<->v48(VarCurr,bitIndex602))& (v224(VarNext,bitIndex21)<->v48(VarCurr,bitIndex601))& (v224(VarNext,bitIndex20)<->v48(VarCurr,bitIndex600))& (v224(VarNext,bitIndex19)<->v48(VarCurr,bitIndex599))& (v224(VarNext,bitIndex18)<->v48(VarCurr,bitIndex598))& (v224(VarNext,bitIndex17)<->v48(VarCurr,bitIndex597))& (v224(VarNext,bitIndex16)<->v48(VarCurr,bitIndex596))& (v224(VarNext,bitIndex15)<->v48(VarCurr,bitIndex595))& (v224(VarNext,bitIndex14)<->v48(VarCurr,bitIndex594))& (v224(VarNext,bitIndex13)<->v48(VarCurr,bitIndex593))& (v224(VarNext,bitIndex12)<->v48(VarCurr,bitIndex592))& (v224(VarNext,bitIndex11)<->v48(VarCurr,bitIndex591))& (v224(VarNext,bitIndex10)<->v48(VarCurr,bitIndex590))& 
(v224(VarNext,bitIndex9)<->v48(VarCurr,bitIndex589))& (v224(VarNext,bitIndex8)<->v48(VarCurr,bitIndex588))& (v224(VarNext,bitIndex7)<->v48(VarCurr,bitIndex587))& (v224(VarNext,bitIndex6)<->v48(VarCurr,bitIndex586))& (v224(VarNext,bitIndex5)<->v48(VarCurr,bitIndex585))& (v224(VarNext,bitIndex4)<->v48(VarCurr,bitIndex584))& (v224(VarNext,bitIndex3)<->v48(VarCurr,bitIndex583))& (v224(VarNext,bitIndex2)<->v48(VarCurr,bitIndex582))& (v224(VarNext,bitIndex1)<->v48(VarCurr,bitIndex581))& (v224(VarNext,bitIndex0)<->v48(VarCurr,bitIndex580)))).
% 78.37/77.77  all VarNext (v225(VarNext)-> (all B (range_115_0(B)-> (v224(VarNext,B)<->v238(VarNext,B))))).
% 78.37/77.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_115_0(B)-> (v238(VarNext,B)<->v236(VarCurr,B))))).
% 78.37/77.77  all VarCurr (-v214(VarCurr)-> (all B (range_115_0(B)-> (v236(VarCurr,B)<->v239(VarCurr,B))))).
% 78.37/77.77  all VarCurr (v214(VarCurr)-> (all B (range_115_0(B)-> (v236(VarCurr,B)<->$F)))).
% 78.37/77.77  all VarCurr (-v161(VarCurr,bitIndex0)-> (all B (range_115_0(B)-> (v239(VarCurr,B)<->v180(VarCurr,B))))).
% 78.37/77.77  all VarCurr (v161(VarCurr,bitIndex0)-> (v239(VarCurr,bitIndex115)<->v48(VarCurr,bitIndex579))& (v239(VarCurr,bitIndex114)<->v48(VarCurr,bitIndex578))& (v239(VarCurr,bitIndex113)<->v48(VarCurr,bitIndex577))& (v239(VarCurr,bitIndex112)<->v48(VarCurr,bitIndex576))& (v239(VarCurr,bitIndex111)<->v48(VarCurr,bitIndex575))& (v239(VarCurr,bitIndex110)<->v48(VarCurr,bitIndex574))& (v239(VarCurr,bitIndex109)<->v48(VarCurr,bitIndex573))& (v239(VarCurr,bitIndex108)<->v48(VarCurr,bitIndex572))& (v239(VarCurr,bitIndex107)<->v48(VarCurr,bitIndex571))& (v239(VarCurr,bitIndex106)<->v48(VarCurr,bitIndex570))& (v239(VarCurr,bitIndex105)<->v48(VarCurr,bitIndex569))& (v239(VarCurr,bitIndex104)<->v48(VarCurr,bitIndex568))& (v239(VarCurr,bitIndex103)<->v48(VarCurr,bitIndex567))& (v239(VarCurr,bitIndex102)<->v48(VarCurr,bitIndex566))& (v239(VarCurr,bitIndex101)<->v48(VarCurr,bitIndex565))& (v239(VarCurr,bitIndex100)<->v48(VarCurr,bitIndex564))& (v239(VarCurr,bitIndex99)<->v48(VarCurr,bitIndex563))& (v239(VarCurr,bitIndex98)<->v48(VarCurr,bitIndex562))& (v239(VarCurr,bitIndex97)<->v48(VarCurr,bitIndex561))& (v239(VarCurr,bitIndex96)<->v48(VarCurr,bitIndex560))& (v239(VarCurr,bitIndex95)<->v48(VarCurr,bitIndex559))& (v239(VarCurr,bitIndex94)<->v48(VarCurr,bitIndex558))& (v239(VarCurr,bitIndex93)<->v48(VarCurr,bitIndex557))& (v239(VarCurr,bitIndex92)<->v48(VarCurr,bitIndex556))& (v239(VarCurr,bitIndex91)<->v48(VarCurr,bitIndex555))& (v239(VarCurr,bitIndex90)<->v48(VarCurr,bitIndex554))& (v239(VarCurr,bitIndex89)<->v48(VarCurr,bitIndex553))& (v239(VarCurr,bitIndex88)<->v48(VarCurr,bitIndex552))& (v239(VarCurr,bitIndex87)<->v48(VarCurr,bitIndex551))& (v239(VarCurr,bitIndex86)<->v48(VarCurr,bitIndex550))& (v239(VarCurr,bitIndex85)<->v48(VarCurr,bitIndex549))& (v239(VarCurr,bitIndex84)<->v48(VarCurr,bitIndex548))& (v239(VarCurr,bitIndex83)<->v48(VarCurr,bitIndex547))& (v239(VarCurr,bitIndex82)<->v48(VarCurr,bitIndex546))& (v239(VarCurr,bitIndex81)<->v48(VarCurr,bitIndex545))& 
(v239(VarCurr,bitIndex80)<->v48(VarCurr,bitIndex544))& (v239(VarCurr,bitIndex79)<->v48(VarCurr,bitIndex543))& (v239(VarCurr,bitIndex78)<->v48(VarCurr,bitIndex542))& (v239(VarCurr,bitIndex77)<->v48(VarCurr,bitIndex541))& (v239(VarCurr,bitIndex76)<->v48(VarCurr,bitIndex540))& (v239(VarCurr,bitIndex75)<->v48(VarCurr,bitIndex539))& (v239(VarCurr,bitIndex74)<->v48(VarCurr,bitIndex538))& (v239(VarCurr,bitIndex73)<->v48(VarCurr,bitIndex537))& (v239(VarCurr,bitIndex72)<->v48(VarCurr,bitIndex536))& (v239(VarCurr,bitIndex71)<->v48(VarCurr,bitIndex535))& (v239(VarCurr,bitIndex70)<->v48(VarCurr,bitIndex534))& (v239(VarCurr,bitIndex69)<->v48(VarCurr,bitIndex533))& (v239(VarCurr,bitIndex68)<->v48(VarCurr,bitIndex532))& (v239(VarCurr,bitIndex67)<->v48(VarCurr,bitIndex531))& (v239(VarCurr,bitIndex66)<->v48(VarCurr,bitIndex530))& (v239(VarCurr,bitIndex65)<->v48(VarCurr,bitIndex529))& (v239(VarCurr,bitIndex64)<->v48(VarCurr,bitIndex528))& (v239(VarCurr,bitIndex63)<->v48(VarCurr,bitIndex527))& (v239(VarCurr,bitIndex62)<->v48(VarCurr,bitIndex526))& (v239(VarCurr,bitIndex61)<->v48(VarCurr,bitIndex525))& (v239(VarCurr,bitIndex60)<->v48(VarCurr,bitIndex524))& (v239(VarCurr,bitIndex59)<->v48(VarCurr,bitIndex523))& (v239(VarCurr,bitIndex58)<->v48(VarCurr,bitIndex522))& (v239(VarCurr,bitIndex57)<->v48(VarCurr,bitIndex521))& (v239(VarCurr,bitIndex56)<->v48(VarCurr,bitIndex520))& (v239(VarCurr,bitIndex55)<->v48(VarCurr,bitIndex519))& (v239(VarCurr,bitIndex54)<->v48(VarCurr,bitIndex518))& (v239(VarCurr,bitIndex53)<->v48(VarCurr,bitIndex517))& (v239(VarCurr,bitIndex52)<->v48(VarCurr,bitIndex516))& (v239(VarCurr,bitIndex51)<->v48(VarCurr,bitIndex515))& (v239(VarCurr,bitIndex50)<->v48(VarCurr,bitIndex514))& (v239(VarCurr,bitIndex49)<->v48(VarCurr,bitIndex513))& (v239(VarCurr,bitIndex48)<->v48(VarCurr,bitIndex512))& (v239(VarCurr,bitIndex47)<->v48(VarCurr,bitIndex511))& (v239(VarCurr,bitIndex46)<->v48(VarCurr,bitIndex510))& (v239(VarCurr,bitIndex45)<->v48(VarCurr,bitIndex509))& 
(v239(VarCurr,bitIndex44)<->v48(VarCurr,bitIndex508))& (v239(VarCurr,bitIndex43)<->v48(VarCurr,bitIndex507))& (v239(VarCurr,bitIndex42)<->v48(VarCurr,bitIndex506))& (v239(VarCurr,bitIndex41)<->v48(VarCurr,bitIndex505))& (v239(VarCurr,bitIndex40)<->v48(VarCurr,bitIndex504))& (v239(VarCurr,bitIndex39)<->v48(VarCurr,bitIndex503))& (v239(VarCurr,bitIndex38)<->v48(VarCurr,bitIndex502))& (v239(VarCurr,bitIndex37)<->v48(VarCurr,bitIndex501))& (v239(VarCurr,bitIndex36)<->v48(VarCurr,bitIndex500))& (v239(VarCurr,bitIndex35)<->v48(VarCurr,bitIndex499))& (v239(VarCurr,bitIndex34)<->v48(VarCurr,bitIndex498))& (v239(VarCurr,bitIndex33)<->v48(VarCurr,bitIndex497))& (v239(VarCurr,bitIndex32)<->v48(VarCurr,bitIndex496))& (v239(VarCurr,bitIndex31)<->v48(VarCurr,bitIndex495))& (v239(VarCurr,bitIndex30)<->v48(VarCurr,bitIndex494))& (v239(VarCurr,bitIndex29)<->v48(VarCurr,bitIndex493))& (v239(VarCurr,bitIndex28)<->v48(VarCurr,bitIndex492))& (v239(VarCurr,bitIndex27)<->v48(VarCurr,bitIndex491))& (v239(VarCurr,bitIndex26)<->v48(VarCurr,bitIndex490))& (v239(VarCurr,bitIndex25)<->v48(VarCurr,bitIndex489))& (v239(VarCurr,bitIndex24)<->v48(VarCurr,bitIndex488))& (v239(VarCurr,bitIndex23)<->v48(VarCurr,bitIndex487))& (v239(VarCurr,bitIndex22)<->v48(VarCurr,bitIndex486))& (v239(VarCurr,bitIndex21)<->v48(VarCurr,bitIndex485))& (v239(VarCurr,bitIndex20)<->v48(VarCurr,bitIndex484))& (v239(VarCurr,bitIndex19)<->v48(VarCurr,bitIndex483))& (v239(VarCurr,bitIndex18)<->v48(VarCurr,bitIndex482))& (v239(VarCurr,bitIndex17)<->v48(VarCurr,bitIndex481))& (v239(VarCurr,bitIndex16)<->v48(VarCurr,bitIndex480))& (v239(VarCurr,bitIndex15)<->v48(VarCurr,bitIndex479))& (v239(VarCurr,bitIndex14)<->v48(VarCurr,bitIndex478))& (v239(VarCurr,bitIndex13)<->v48(VarCurr,bitIndex477))& (v239(VarCurr,bitIndex12)<->v48(VarCurr,bitIndex476))& (v239(VarCurr,bitIndex11)<->v48(VarCurr,bitIndex475))& (v239(VarCurr,bitIndex10)<->v48(VarCurr,bitIndex474))& (v239(VarCurr,bitIndex9)<->v48(VarCurr,bitIndex473))& 
(v239(VarCurr,bitIndex8)<->v48(VarCurr,bitIndex472))& (v239(VarCurr,bitIndex7)<->v48(VarCurr,bitIndex471))& (v239(VarCurr,bitIndex6)<->v48(VarCurr,bitIndex470))& (v239(VarCurr,bitIndex5)<->v48(VarCurr,bitIndex469))& (v239(VarCurr,bitIndex4)<->v48(VarCurr,bitIndex468))& (v239(VarCurr,bitIndex3)<->v48(VarCurr,bitIndex467))& (v239(VarCurr,bitIndex2)<->v48(VarCurr,bitIndex466))& (v239(VarCurr,bitIndex1)<->v48(VarCurr,bitIndex465))& (v239(VarCurr,bitIndex0)<->v48(VarCurr,bitIndex464))).
% 78.37/77.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v225(VarNext)<->v226(VarNext)&v233(VarNext))).
% 78.37/77.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v233(VarNext)<->v231(VarCurr))).
% 78.37/77.77  all VarCurr (v231(VarCurr)<->v214(VarCurr)|v234(VarCurr)).
% 78.37/77.77  all VarCurr (v234(VarCurr)<->v57(VarCurr,bitIndex0)&v235(VarCurr)).
% 78.37/77.77  all VarCurr (-v235(VarCurr)<->v214(VarCurr)).
% 78.37/77.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v226(VarNext)<->v228(VarNext)&v188(VarNext))).
% 78.37/77.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v228(VarNext)<->v207(VarNext))).
% 78.37/77.77  all VarNext ((v48(VarNext,bitIndex579)<->v202(VarNext,bitIndex115))& (v48(VarNext,bitIndex578)<->v202(VarNext,bitIndex114))& (v48(VarNext,bitIndex577)<->v202(VarNext,bitIndex113))& (v48(VarNext,bitIndex576)<->v202(VarNext,bitIndex112))& (v48(VarNext,bitIndex575)<->v202(VarNext,bitIndex111))& (v48(VarNext,bitIndex574)<->v202(VarNext,bitIndex110))& (v48(VarNext,bitIndex573)<->v202(VarNext,bitIndex109))).
% 78.37/77.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v203(VarNext)-> (v202(VarNext,bitIndex115)<->v48(VarCurr,bitIndex579))& (v202(VarNext,bitIndex114)<->v48(VarCurr,bitIndex578))& (v202(VarNext,bitIndex113)<->v48(VarCurr,bitIndex577))& (v202(VarNext,bitIndex112)<->v48(VarCurr,bitIndex576))& (v202(VarNext,bitIndex111)<->v48(VarCurr,bitIndex575))& (v202(VarNext,bitIndex110)<->v48(VarCurr,bitIndex574))& (v202(VarNext,bitIndex109)<->v48(VarCurr,bitIndex573))& (v202(VarNext,bitIndex108)<->v48(VarCurr,bitIndex572))& (v202(VarNext,bitIndex107)<->v48(VarCurr,bitIndex571))& (v202(VarNext,bitIndex106)<->v48(VarCurr,bitIndex570))& (v202(VarNext,bitIndex105)<->v48(VarCurr,bitIndex569))& (v202(VarNext,bitIndex104)<->v48(VarCurr,bitIndex568))& (v202(VarNext,bitIndex103)<->v48(VarCurr,bitIndex567))& (v202(VarNext,bitIndex102)<->v48(VarCurr,bitIndex566))& (v202(VarNext,bitIndex101)<->v48(VarCurr,bitIndex565))& (v202(VarNext,bitIndex100)<->v48(VarCurr,bitIndex564))& (v202(VarNext,bitIndex99)<->v48(VarCurr,bitIndex563))& (v202(VarNext,bitIndex98)<->v48(VarCurr,bitIndex562))& (v202(VarNext,bitIndex97)<->v48(VarCurr,bitIndex561))& (v202(VarNext,bitIndex96)<->v48(VarCurr,bitIndex560))& (v202(VarNext,bitIndex95)<->v48(VarCurr,bitIndex559))& (v202(VarNext,bitIndex94)<->v48(VarCurr,bitIndex558))& (v202(VarNext,bitIndex93)<->v48(VarCurr,bitIndex557))& (v202(VarNext,bitIndex92)<->v48(VarCurr,bitIndex556))& (v202(VarNext,bitIndex91)<->v48(VarCurr,bitIndex555))& (v202(VarNext,bitIndex90)<->v48(VarCurr,bitIndex554))& (v202(VarNext,bitIndex89)<->v48(VarCurr,bitIndex553))& (v202(VarNext,bitIndex88)<->v48(VarCurr,bitIndex552))& (v202(VarNext,bitIndex87)<->v48(VarCurr,bitIndex551))& (v202(VarNext,bitIndex86)<->v48(VarCurr,bitIndex550))& (v202(VarNext,bitIndex85)<->v48(VarCurr,bitIndex549))& (v202(VarNext,bitIndex84)<->v48(VarCurr,bitIndex548))& (v202(VarNext,bitIndex83)<->v48(VarCurr,bitIndex547))& (v202(VarNext,bitIndex82)<->v48(VarCurr,bitIndex546))& 
(v202(VarNext,bitIndex81)<->v48(VarCurr,bitIndex545))& (v202(VarNext,bitIndex80)<->v48(VarCurr,bitIndex544))& (v202(VarNext,bitIndex79)<->v48(VarCurr,bitIndex543))& (v202(VarNext,bitIndex78)<->v48(VarCurr,bitIndex542))& (v202(VarNext,bitIndex77)<->v48(VarCurr,bitIndex541))& (v202(VarNext,bitIndex76)<->v48(VarCurr,bitIndex540))& (v202(VarNext,bitIndex75)<->v48(VarCurr,bitIndex539))& (v202(VarNext,bitIndex74)<->v48(VarCurr,bitIndex538))& (v202(VarNext,bitIndex73)<->v48(VarCurr,bitIndex537))& (v202(VarNext,bitIndex72)<->v48(VarCurr,bitIndex536))& (v202(VarNext,bitIndex71)<->v48(VarCurr,bitIndex535))& (v202(VarNext,bitIndex70)<->v48(VarCurr,bitIndex534))& (v202(VarNext,bitIndex69)<->v48(VarCurr,bitIndex533))& (v202(VarNext,bitIndex68)<->v48(VarCurr,bitIndex532))& (v202(VarNext,bitIndex67)<->v48(VarCurr,bitIndex531))& (v202(VarNext,bitIndex66)<->v48(VarCurr,bitIndex530))& (v202(VarNext,bitIndex65)<->v48(VarCurr,bitIndex529))& (v202(VarNext,bitIndex64)<->v48(VarCurr,bitIndex528))& (v202(VarNext,bitIndex63)<->v48(VarCurr,bitIndex527))& (v202(VarNext,bitIndex62)<->v48(VarCurr,bitIndex526))& (v202(VarNext,bitIndex61)<->v48(VarCurr,bitIndex525))& (v202(VarNext,bitIndex60)<->v48(VarCurr,bitIndex524))& (v202(VarNext,bitIndex59)<->v48(VarCurr,bitIndex523))& (v202(VarNext,bitIndex58)<->v48(VarCurr,bitIndex522))& (v202(VarNext,bitIndex57)<->v48(VarCurr,bitIndex521))& (v202(VarNext,bitIndex56)<->v48(VarCurr,bitIndex520))& (v202(VarNext,bitIndex55)<->v48(VarCurr,bitIndex519))& (v202(VarNext,bitIndex54)<->v48(VarCurr,bitIndex518))& (v202(VarNext,bitIndex53)<->v48(VarCurr,bitIndex517))& (v202(VarNext,bitIndex52)<->v48(VarCurr,bitIndex516))& (v202(VarNext,bitIndex51)<->v48(VarCurr,bitIndex515))& (v202(VarNext,bitIndex50)<->v48(VarCurr,bitIndex514))& (v202(VarNext,bitIndex49)<->v48(VarCurr,bitIndex513))& (v202(VarNext,bitIndex48)<->v48(VarCurr,bitIndex512))& (v202(VarNext,bitIndex47)<->v48(VarCurr,bitIndex511))& (v202(VarNext,bitIndex46)<->v48(VarCurr,bitIndex510))& 
(v202(VarNext,bitIndex45)<->v48(VarCurr,bitIndex509))& (v202(VarNext,bitIndex44)<->v48(VarCurr,bitIndex508))& (v202(VarNext,bitIndex43)<->v48(VarCurr,bitIndex507))& (v202(VarNext,bitIndex42)<->v48(VarCurr,bitIndex506))& (v202(VarNext,bitIndex41)<->v48(VarCurr,bitIndex505))& (v202(VarNext,bitIndex40)<->v48(VarCurr,bitIndex504))& (v202(VarNext,bitIndex39)<->v48(VarCurr,bitIndex503))& (v202(VarNext,bitIndex38)<->v48(VarCurr,bitIndex502))& (v202(VarNext,bitIndex37)<->v48(VarCurr,bitIndex501))& (v202(VarNext,bitIndex36)<->v48(VarCurr,bitIndex500))& (v202(VarNext,bitIndex35)<->v48(VarCurr,bitIndex499))& (v202(VarNext,bitIndex34)<->v48(VarCurr,bitIndex498))& (v202(VarNext,bitIndex33)<->v48(VarCurr,bitIndex497))& (v202(VarNext,bitIndex32)<->v48(VarCurr,bitIndex496))& (v202(VarNext,bitIndex31)<->v48(VarCurr,bitIndex495))& (v202(VarNext,bitIndex30)<->v48(VarCurr,bitIndex494))& (v202(VarNext,bitIndex29)<->v48(VarCurr,bitIndex493))& (v202(VarNext,bitIndex28)<->v48(VarCurr,bitIndex492))& (v202(VarNext,bitIndex27)<->v48(VarCurr,bitIndex491))& (v202(VarNext,bitIndex26)<->v48(VarCurr,bitIndex490))& (v202(VarNext,bitIndex25)<->v48(VarCurr,bitIndex489))& (v202(VarNext,bitIndex24)<->v48(VarCurr,bitIndex488))& (v202(VarNext,bitIndex23)<->v48(VarCurr,bitIndex487))& (v202(VarNext,bitIndex22)<->v48(VarCurr,bitIndex486))& (v202(VarNext,bitIndex21)<->v48(VarCurr,bitIndex485))& (v202(VarNext,bitIndex20)<->v48(VarCurr,bitIndex484))& (v202(VarNext,bitIndex19)<->v48(VarCurr,bitIndex483))& (v202(VarNext,bitIndex18)<->v48(VarCurr,bitIndex482))& (v202(VarNext,bitIndex17)<->v48(VarCurr,bitIndex481))& (v202(VarNext,bitIndex16)<->v48(VarCurr,bitIndex480))& (v202(VarNext,bitIndex15)<->v48(VarCurr,bitIndex479))& (v202(VarNext,bitIndex14)<->v48(VarCurr,bitIndex478))& (v202(VarNext,bitIndex13)<->v48(VarCurr,bitIndex477))& (v202(VarNext,bitIndex12)<->v48(VarCurr,bitIndex476))& (v202(VarNext,bitIndex11)<->v48(VarCurr,bitIndex475))& (v202(VarNext,bitIndex10)<->v48(VarCurr,bitIndex474))& 
(v202(VarNext,bitIndex9)<->v48(VarCurr,bitIndex473))& (v202(VarNext,bitIndex8)<->v48(VarCurr,bitIndex472))& (v202(VarNext,bitIndex7)<->v48(VarCurr,bitIndex471))& (v202(VarNext,bitIndex6)<->v48(VarCurr,bitIndex470))& (v202(VarNext,bitIndex5)<->v48(VarCurr,bitIndex469))& (v202(VarNext,bitIndex4)<->v48(VarCurr,bitIndex468))& (v202(VarNext,bitIndex3)<->v48(VarCurr,bitIndex467))& (v202(VarNext,bitIndex2)<->v48(VarCurr,bitIndex466))& (v202(VarNext,bitIndex1)<->v48(VarCurr,bitIndex465))& (v202(VarNext,bitIndex0)<->v48(VarCurr,bitIndex464)))).
% 78.37/77.77  all VarNext (v203(VarNext)-> (all B (range_115_0(B)-> (v202(VarNext,B)<->v219(VarNext,B))))).
% 78.37/77.77  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_115_0(B)-> (v219(VarNext,B)<->v217(VarCurr,B))))).
% 78.37/77.77  all VarCurr (-v214(VarCurr)-> (all B (range_115_0(B)-> (v217(VarCurr,B)<->v220(VarCurr,B))))).
% 78.37/77.77  all VarCurr (v214(VarCurr)-> (all B (range_115_0(B)-> (v217(VarCurr,B)<->$F)))).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex115).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex114).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex113).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex112).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex111).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex110).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex109).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex108).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex107).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex106).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex105).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex104).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex103).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex102).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex101).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex100).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex99).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex98).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex97).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex96).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex95).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex94).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex93).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex92).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex91).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex90).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex89).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex88).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex87).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex86).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex85).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex84).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex83).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex82).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex81).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex80).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex79).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex78).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex77).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex76).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex75).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex74).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex73).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex72).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex71).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex70).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex69).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex68).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex67).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex66).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex65).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex64).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex63).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex62).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex61).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex60).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex59).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex58).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex57).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex56).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex55).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex54).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex53).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex52).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex51).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex50).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex49).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex48).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex47).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex46).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex45).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex44).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex43).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex42).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex41).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex40).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex39).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex38).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex37).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex36).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex35).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex34).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex33).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex32).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex31).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex30).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex29).
% 78.37/77.77  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex28).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex27).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex26).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex25).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex24).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex23).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex22).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex21).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex20).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex19).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex18).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex17).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex16).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex15).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex14).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex13).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex12).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex11).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex10).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex9).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex8).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex7).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex6).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex5).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex4).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex3).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex2).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex1).
% 78.37/77.78  -b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000(bitIndex0).
% 78.37/77.78  all VarCurr (-v161(VarCurr,bitIndex1)-> (all B (range_115_0(B)-> (v220(VarCurr,B)<->v180(VarCurr,B))))).
% 78.37/77.78  all B (range_115_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B|bitIndex6=B|bitIndex7=B|bitIndex8=B|bitIndex9=B|bitIndex10=B|bitIndex11=B|bitIndex12=B|bitIndex13=B|bitIndex14=B|bitIndex15=B|bitIndex16=B|bitIndex17=B|bitIndex18=B|bitIndex19=B|bitIndex20=B|bitIndex21=B|bitIndex22=B|bitIndex23=B|bitIndex24=B|bitIndex25=B|bitIndex26=B|bitIndex27=B|bitIndex28=B|bitIndex29=B|bitIndex30=B|bitIndex31=B|bitIndex32=B|bitIndex33=B|bitIndex34=B|bitIndex35=B|bitIndex36=B|bitIndex37=B|bitIndex38=B|bitIndex39=B|bitIndex40=B|bitIndex41=B|bitIndex42=B|bitIndex43=B|bitIndex44=B|bitIndex45=B|bitIndex46=B|bitIndex47=B|bitIndex48=B|bitIndex49=B|bitIndex50=B|bitIndex51=B|bitIndex52=B|bitIndex53=B|bitIndex54=B|bitIndex55=B|bitIndex56=B|bitIndex57=B|bitIndex58=B|bitIndex59=B|bitIndex60=B|bitIndex61=B|bitIndex62=B|bitIndex63=B|bitIndex64=B|bitIndex65=B|bitIndex66=B|bitIndex67=B|bitIndex68=B|bitIndex69=B|bitIndex70=B|bitIndex71=B|bitIndex72=B|bitIndex73=B|bitIndex74=B|bitIndex75=B|bitIndex76=B|bitIndex77=B|bitIndex78=B|bitIndex79=B|bitIndex80=B|bitIndex81=B|bitIndex82=B|bitIndex83=B|bitIndex84=B|bitIndex85=B|bitIndex86=B|bitIndex87=B|bitIndex88=B|bitIndex89=B|bitIndex90=B|bitIndex91=B|bitIndex92=B|bitIndex93=B|bitIndex94=B|bitIndex95=B|bitIndex96=B|bitIndex97=B|bitIndex98=B|bitIndex99=B|bitIndex100=B|bitIndex101=B|bitIndex102=B|bitIndex103=B|bitIndex104=B|bitIndex105=B|bitIndex106=B|bitIndex107=B|bitIndex108=B|bitIndex109=B|bitIndex110=B|bitIndex111=B|bitIndex112=B|bitIndex113=B|bitIndex114=B|bitIndex115=B).
% 78.37/77.78  all VarCurr (v161(VarCurr,bitIndex1)-> (v220(VarCurr,bitIndex115)<->v48(VarCurr,bitIndex463))& (v220(VarCurr,bitIndex114)<->v48(VarCurr,bitIndex462))& (v220(VarCurr,bitIndex113)<->v48(VarCurr,bitIndex461))& (v220(VarCurr,bitIndex112)<->v48(VarCurr,bitIndex460))& (v220(VarCurr,bitIndex111)<->v48(VarCurr,bitIndex459))& (v220(VarCurr,bitIndex110)<->v48(VarCurr,bitIndex458))& (v220(VarCurr,bitIndex109)<->v48(VarCurr,bitIndex457))& (v220(VarCurr,bitIndex108)<->v48(VarCurr,bitIndex456))& (v220(VarCurr,bitIndex107)<->v48(VarCurr,bitIndex455))& (v220(VarCurr,bitIndex106)<->v48(VarCurr,bitIndex454))& (v220(VarCurr,bitIndex105)<->v48(VarCurr,bitIndex453))& (v220(VarCurr,bitIndex104)<->v48(VarCurr,bitIndex452))& (v220(VarCurr,bitIndex103)<->v48(VarCurr,bitIndex451))& (v220(VarCurr,bitIndex102)<->v48(VarCurr,bitIndex450))& (v220(VarCurr,bitIndex101)<->v48(VarCurr,bitIndex449))& (v220(VarCurr,bitIndex100)<->v48(VarCurr,bitIndex448))& (v220(VarCurr,bitIndex99)<->v48(VarCurr,bitIndex447))& (v220(VarCurr,bitIndex98)<->v48(VarCurr,bitIndex446))& (v220(VarCurr,bitIndex97)<->v48(VarCurr,bitIndex445))& (v220(VarCurr,bitIndex96)<->v48(VarCurr,bitIndex444))& (v220(VarCurr,bitIndex95)<->v48(VarCurr,bitIndex443))& (v220(VarCurr,bitIndex94)<->v48(VarCurr,bitIndex442))& (v220(VarCurr,bitIndex93)<->v48(VarCurr,bitIndex441))& (v220(VarCurr,bitIndex92)<->v48(VarCurr,bitIndex440))& (v220(VarCurr,bitIndex91)<->v48(VarCurr,bitIndex439))& (v220(VarCurr,bitIndex90)<->v48(VarCurr,bitIndex438))& (v220(VarCurr,bitIndex89)<->v48(VarCurr,bitIndex437))& (v220(VarCurr,bitIndex88)<->v48(VarCurr,bitIndex436))& (v220(VarCurr,bitIndex87)<->v48(VarCurr,bitIndex435))& (v220(VarCurr,bitIndex86)<->v48(VarCurr,bitIndex434))& (v220(VarCurr,bitIndex85)<->v48(VarCurr,bitIndex433))& (v220(VarCurr,bitIndex84)<->v48(VarCurr,bitIndex432))& (v220(VarCurr,bitIndex83)<->v48(VarCurr,bitIndex431))& (v220(VarCurr,bitIndex82)<->v48(VarCurr,bitIndex430))& (v220(VarCurr,bitIndex81)<->v48(VarCurr,bitIndex429))& 
(v220(VarCurr,bitIndex80)<->v48(VarCurr,bitIndex428))& (v220(VarCurr,bitIndex79)<->v48(VarCurr,bitIndex427))& (v220(VarCurr,bitIndex78)<->v48(VarCurr,bitIndex426))& (v220(VarCurr,bitIndex77)<->v48(VarCurr,bitIndex425))& (v220(VarCurr,bitIndex76)<->v48(VarCurr,bitIndex424))& (v220(VarCurr,bitIndex75)<->v48(VarCurr,bitIndex423))& (v220(VarCurr,bitIndex74)<->v48(VarCurr,bitIndex422))& (v220(VarCurr,bitIndex73)<->v48(VarCurr,bitIndex421))& (v220(VarCurr,bitIndex72)<->v48(VarCurr,bitIndex420))& (v220(VarCurr,bitIndex71)<->v48(VarCurr,bitIndex419))& (v220(VarCurr,bitIndex70)<->v48(VarCurr,bitIndex418))& (v220(VarCurr,bitIndex69)<->v48(VarCurr,bitIndex417))& (v220(VarCurr,bitIndex68)<->v48(VarCurr,bitIndex416))& (v220(VarCurr,bitIndex67)<->v48(VarCurr,bitIndex415))& (v220(VarCurr,bitIndex66)<->v48(VarCurr,bitIndex414))& (v220(VarCurr,bitIndex65)<->v48(VarCurr,bitIndex413))& (v220(VarCurr,bitIndex64)<->v48(VarCurr,bitIndex412))& (v220(VarCurr,bitIndex63)<->v48(VarCurr,bitIndex411))& (v220(VarCurr,bitIndex62)<->v48(VarCurr,bitIndex410))& (v220(VarCurr,bitIndex61)<->v48(VarCurr,bitIndex409))& (v220(VarCurr,bitIndex60)<->v48(VarCurr,bitIndex408))& (v220(VarCurr,bitIndex59)<->v48(VarCurr,bitIndex407))& (v220(VarCurr,bitIndex58)<->v48(VarCurr,bitIndex406))& (v220(VarCurr,bitIndex57)<->v48(VarCurr,bitIndex405))& (v220(VarCurr,bitIndex56)<->v48(VarCurr,bitIndex404))& (v220(VarCurr,bitIndex55)<->v48(VarCurr,bitIndex403))& (v220(VarCurr,bitIndex54)<->v48(VarCurr,bitIndex402))& (v220(VarCurr,bitIndex53)<->v48(VarCurr,bitIndex401))& (v220(VarCurr,bitIndex52)<->v48(VarCurr,bitIndex400))& (v220(VarCurr,bitIndex51)<->v48(VarCurr,bitIndex399))& (v220(VarCurr,bitIndex50)<->v48(VarCurr,bitIndex398))& (v220(VarCurr,bitIndex49)<->v48(VarCurr,bitIndex397))& (v220(VarCurr,bitIndex48)<->v48(VarCurr,bitIndex396))& (v220(VarCurr,bitIndex47)<->v48(VarCurr,bitIndex395))& (v220(VarCurr,bitIndex46)<->v48(VarCurr,bitIndex394))& (v220(VarCurr,bitIndex45)<->v48(VarCurr,bitIndex393))& 
(v220(VarCurr,bitIndex44)<->v48(VarCurr,bitIndex392))& (v220(VarCurr,bitIndex43)<->v48(VarCurr,bitIndex391))& (v220(VarCurr,bitIndex42)<->v48(VarCurr,bitIndex390))& (v220(VarCurr,bitIndex41)<->v48(VarCurr,bitIndex389))& (v220(VarCurr,bitIndex40)<->v48(VarCurr,bitIndex388))& (v220(VarCurr,bitIndex39)<->v48(VarCurr,bitIndex387))& (v220(VarCurr,bitIndex38)<->v48(VarCurr,bitIndex386))& (v220(VarCurr,bitIndex37)<->v48(VarCurr,bitIndex385))& (v220(VarCurr,bitIndex36)<->v48(VarCurr,bitIndex384))& (v220(VarCurr,bitIndex35)<->v48(VarCurr,bitIndex383))& (v220(VarCurr,bitIndex34)<->v48(VarCurr,bitIndex382))& (v220(VarCurr,bitIndex33)<->v48(VarCurr,bitIndex381))& (v220(VarCurr,bitIndex32)<->v48(VarCurr,bitIndex380))& (v220(VarCurr,bitIndex31)<->v48(VarCurr,bitIndex379))& (v220(VarCurr,bitIndex30)<->v48(VarCurr,bitIndex378))& (v220(VarCurr,bitIndex29)<->v48(VarCurr,bitIndex377))& (v220(VarCurr,bitIndex28)<->v48(VarCurr,bitIndex376))& (v220(VarCurr,bitIndex27)<->v48(VarCurr,bitIndex375))& (v220(VarCurr,bitIndex26)<->v48(VarCurr,bitIndex374))& (v220(VarCurr,bitIndex25)<->v48(VarCurr,bitIndex373))& (v220(VarCurr,bitIndex24)<->v48(VarCurr,bitIndex372))& (v220(VarCurr,bitIndex23)<->v48(VarCurr,bitIndex371))& (v220(VarCurr,bitIndex22)<->v48(VarCurr,bitIndex370))& (v220(VarCurr,bitIndex21)<->v48(VarCurr,bitIndex369))& (v220(VarCurr,bitIndex20)<->v48(VarCurr,bitIndex368))& (v220(VarCurr,bitIndex19)<->v48(VarCurr,bitIndex367))& (v220(VarCurr,bitIndex18)<->v48(VarCurr,bitIndex366))& (v220(VarCurr,bitIndex17)<->v48(VarCurr,bitIndex365))& (v220(VarCurr,bitIndex16)<->v48(VarCurr,bitIndex364))& (v220(VarCurr,bitIndex15)<->v48(VarCurr,bitIndex363))& (v220(VarCurr,bitIndex14)<->v48(VarCurr,bitIndex362))& (v220(VarCurr,bitIndex13)<->v48(VarCurr,bitIndex361))& (v220(VarCurr,bitIndex12)<->v48(VarCurr,bitIndex360))& (v220(VarCurr,bitIndex11)<->v48(VarCurr,bitIndex359))& (v220(VarCurr,bitIndex10)<->v48(VarCurr,bitIndex358))& (v220(VarCurr,bitIndex9)<->v48(VarCurr,bitIndex357))& 
(v220(VarCurr,bitIndex8)<->v48(VarCurr,bitIndex356))& (v220(VarCurr,bitIndex7)<->v48(VarCurr,bitIndex355))& (v220(VarCurr,bitIndex6)<->v48(VarCurr,bitIndex354))& (v220(VarCurr,bitIndex5)<->v48(VarCurr,bitIndex353))& (v220(VarCurr,bitIndex4)<->v48(VarCurr,bitIndex352))& (v220(VarCurr,bitIndex3)<->v48(VarCurr,bitIndex351))& (v220(VarCurr,bitIndex2)<->v48(VarCurr,bitIndex350))& (v220(VarCurr,bitIndex1)<->v48(VarCurr,bitIndex349))& (v220(VarCurr,bitIndex0)<->v48(VarCurr,bitIndex348))).
% 78.37/77.78  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v203(VarNext)<->v204(VarNext)&v213(VarNext))).
% 78.37/77.78  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v213(VarNext)<->v211(VarCurr))).
% 78.37/77.78  all VarCurr (v211(VarCurr)<->v214(VarCurr)|v215(VarCurr)).
% 78.37/77.78  all VarCurr (v215(VarCurr)<->v57(VarCurr,bitIndex1)&v216(VarCurr)).
% 78.37/77.78  all VarCurr (-v216(VarCurr)<->v214(VarCurr)).
% 78.37/77.78  all VarCurr (-v214(VarCurr)<->v50(VarCurr)).
% 78.37/77.78  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v204(VarNext)<->v205(VarNext)&v188(VarNext))).
% 78.37/77.78  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v205(VarNext)<->v207(VarNext))).
% 78.37/77.78  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v207(VarNext)<->v188(VarCurr))).
% 78.37/77.78  -v48(constB0,bitIndex695).
% 78.37/77.78  -v48(constB0,bitIndex694).
% 78.37/77.78  -v48(constB0,bitIndex693).
% 78.37/77.78  -v48(constB0,bitIndex692).
% 78.37/77.78  -v48(constB0,bitIndex691).
% 78.37/77.78  -v48(constB0,bitIndex690).
% 78.37/77.78  -v48(constB0,bitIndex689).
% 78.37/77.78  -v48(constB0,bitIndex681).
% 78.37/77.78  -v48(constB0,bitIndex680).
% 78.37/77.78  -v48(constB0,bitIndex679).
% 78.37/77.78  -v48(constB0,bitIndex678).
% 78.37/77.78  -v48(constB0,bitIndex677).
% 78.37/77.78  -v48(constB0,bitIndex676).
% 78.37/77.78  -v48(constB0,bitIndex675).
% 78.37/77.78  -v48(constB0,bitIndex674).
% 78.37/77.78  -v48(constB0,bitIndex641).
% 78.37/77.78  -v48(constB0,bitIndex640).
% 78.37/77.78  -v48(constB0,bitIndex639).
% 78.37/77.78  -v48(constB0,bitIndex638).
% 78.37/77.78  -v48(constB0,bitIndex637).
% 78.37/77.78  -v48(constB0,bitIndex636).
% 78.37/77.78  -v48(constB0,bitIndex635).
% 78.37/77.78  -v48(constB0,bitIndex634).
% 78.37/77.78  -v48(constB0,bitIndex633).
% 78.37/77.78  -v48(constB0,bitIndex632).
% 78.37/77.78  -v48(constB0,bitIndex631).
% 78.37/77.78  -v48(constB0,bitIndex630).
% 78.37/77.78  -v48(constB0,bitIndex629).
% 78.37/77.78  -v48(constB0,bitIndex628).
% 78.37/77.78  -v48(constB0,bitIndex627).
% 78.37/77.78  -v48(constB0,bitIndex626).
% 78.37/77.78  -v48(constB0,bitIndex625).
% 78.37/77.78  -v48(constB0,bitIndex624).
% 78.37/77.78  -v48(constB0,bitIndex623).
% 78.37/77.78  -v48(constB0,bitIndex622).
% 78.37/77.78  -v48(constB0,bitIndex621).
% 78.37/77.78  -v48(constB0,bitIndex620).
% 78.37/77.78  -v48(constB0,bitIndex619).
% 78.37/77.78  -v48(constB0,bitIndex618).
% 78.37/77.78  -v48(constB0,bitIndex617).
% 78.37/77.78  -v48(constB0,bitIndex616).
% 78.37/77.78  -v48(constB0,bitIndex615).
% 78.37/77.78  -v48(constB0,bitIndex614).
% 78.37/77.78  -v48(constB0,bitIndex613).
% 78.37/77.78  -v48(constB0,bitIndex612).
% 78.37/77.78  -v48(constB0,bitIndex611).
% 78.37/77.78  -v48(constB0,bitIndex610).
% 78.37/77.78  -v48(constB0,bitIndex609).
% 78.37/77.78  -v48(constB0,bitIndex608).
% 78.37/77.78  -v48(constB0,bitIndex607).
% 78.37/77.78  -v48(constB0,bitIndex606).
% 78.37/77.78  -v48(constB0,bitIndex605).
% 78.37/77.78  -v48(constB0,bitIndex604).
% 78.37/77.78  -v48(constB0,bitIndex603).
% 78.37/77.78  -v48(constB0,bitIndex602).
% 78.37/77.78  -v48(constB0,bitIndex601).
% 78.37/77.78  -v48(constB0,bitIndex600).
% 78.37/77.78  -v48(constB0,bitIndex599).
% 78.37/77.78  -v48(constB0,bitIndex598).
% 78.37/77.78  -v48(constB0,bitIndex597).
% 78.37/77.78  -v48(constB0,bitIndex596).
% 78.37/77.78  -v48(constB0,bitIndex595).
% 78.37/77.78  -v48(constB0,bitIndex594).
% 78.37/77.78  -v48(constB0,bitIndex583).
% 78.37/77.78  -v48(constB0,bitIndex582).
% 78.37/77.78  -v48(constB0,bitIndex581).
% 78.37/77.78  -v48(constB0,bitIndex580).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex115).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex114).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex113).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex112).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex111).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex110).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex109).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex101).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex100).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex99).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex98).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex97).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex96).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex95).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex94).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex61).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex60).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex59).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex58).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex57).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex56).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex55).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex54).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex53).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex52).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex51).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex50).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex49).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex48).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex47).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex46).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex45).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex44).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex43).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex42).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex41).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex40).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex39).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex38).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex37).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex36).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex35).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex34).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex33).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex32).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex31).
% 78.37/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex30).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex29).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex28).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex27).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex26).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex25).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex24).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex23).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex22).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex21).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex20).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex19).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex18).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex17).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex16).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex15).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex14).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex3).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex2).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex1).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex0).
% 78.46/77.78  -v48(constB0,bitIndex579).
% 78.46/77.78  -v48(constB0,bitIndex578).
% 78.46/77.78  -v48(constB0,bitIndex577).
% 78.46/77.78  -v48(constB0,bitIndex576).
% 78.46/77.78  -v48(constB0,bitIndex575).
% 78.46/77.78  -v48(constB0,bitIndex574).
% 78.46/77.78  -v48(constB0,bitIndex573).
% 78.46/77.78  -v48(constB0,bitIndex565).
% 78.46/77.78  -v48(constB0,bitIndex564).
% 78.46/77.78  -v48(constB0,bitIndex563).
% 78.46/77.78  -v48(constB0,bitIndex562).
% 78.46/77.78  -v48(constB0,bitIndex561).
% 78.46/77.78  -v48(constB0,bitIndex560).
% 78.46/77.78  -v48(constB0,bitIndex559).
% 78.46/77.78  -v48(constB0,bitIndex558).
% 78.46/77.78  -v48(constB0,bitIndex525).
% 78.46/77.78  -v48(constB0,bitIndex524).
% 78.46/77.78  -v48(constB0,bitIndex523).
% 78.46/77.78  -v48(constB0,bitIndex522).
% 78.46/77.78  -v48(constB0,bitIndex521).
% 78.46/77.78  -v48(constB0,bitIndex520).
% 78.46/77.78  -v48(constB0,bitIndex519).
% 78.46/77.78  -v48(constB0,bitIndex518).
% 78.46/77.78  -v48(constB0,bitIndex517).
% 78.46/77.78  -v48(constB0,bitIndex516).
% 78.46/77.78  -v48(constB0,bitIndex515).
% 78.46/77.78  -v48(constB0,bitIndex514).
% 78.46/77.78  -v48(constB0,bitIndex513).
% 78.46/77.78  -v48(constB0,bitIndex512).
% 78.46/77.78  -v48(constB0,bitIndex511).
% 78.46/77.78  -v48(constB0,bitIndex510).
% 78.46/77.78  -v48(constB0,bitIndex509).
% 78.46/77.78  -v48(constB0,bitIndex508).
% 78.46/77.78  -v48(constB0,bitIndex507).
% 78.46/77.78  -v48(constB0,bitIndex506).
% 78.46/77.78  -v48(constB0,bitIndex505).
% 78.46/77.78  -v48(constB0,bitIndex504).
% 78.46/77.78  -v48(constB0,bitIndex503).
% 78.46/77.78  -v48(constB0,bitIndex502).
% 78.46/77.78  -v48(constB0,bitIndex501).
% 78.46/77.78  -v48(constB0,bitIndex500).
% 78.46/77.78  -v48(constB0,bitIndex499).
% 78.46/77.78  -v48(constB0,bitIndex498).
% 78.46/77.78  -v48(constB0,bitIndex497).
% 78.46/77.78  -v48(constB0,bitIndex496).
% 78.46/77.78  -v48(constB0,bitIndex495).
% 78.46/77.78  -v48(constB0,bitIndex494).
% 78.46/77.78  -v48(constB0,bitIndex493).
% 78.46/77.78  -v48(constB0,bitIndex492).
% 78.46/77.78  -v48(constB0,bitIndex491).
% 78.46/77.78  -v48(constB0,bitIndex490).
% 78.46/77.78  -v48(constB0,bitIndex489).
% 78.46/77.78  -v48(constB0,bitIndex488).
% 78.46/77.78  -v48(constB0,bitIndex487).
% 78.46/77.78  -v48(constB0,bitIndex486).
% 78.46/77.78  -v48(constB0,bitIndex485).
% 78.46/77.78  -v48(constB0,bitIndex484).
% 78.46/77.78  -v48(constB0,bitIndex483).
% 78.46/77.78  -v48(constB0,bitIndex482).
% 78.46/77.78  -v48(constB0,bitIndex481).
% 78.46/77.78  -v48(constB0,bitIndex480).
% 78.46/77.78  -v48(constB0,bitIndex479).
% 78.46/77.78  -v48(constB0,bitIndex478).
% 78.46/77.78  -v48(constB0,bitIndex467).
% 78.46/77.78  -v48(constB0,bitIndex466).
% 78.46/77.78  -v48(constB0,bitIndex465).
% 78.46/77.78  -v48(constB0,bitIndex464).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex115).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex114).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex113).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex112).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex111).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex110).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex109).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex101).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex100).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex99).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex98).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex97).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex96).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex95).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex94).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex61).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex60).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex59).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex58).
% 78.46/77.78  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex57).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex56).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex55).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex54).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex53).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex52).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex51).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex50).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex49).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex48).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex47).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex46).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex45).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex44).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex43).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex42).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex41).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex40).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex39).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex38).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex37).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex36).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex35).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex34).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex33).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex32).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex31).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex30).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex29).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex28).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex27).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex26).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex25).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex24).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex23).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex22).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex21).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex20).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex19).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex18).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex17).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex16).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex15).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex14).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex3).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex2).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex1).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex0).
% 78.46/77.79  -v48(constB0,bitIndex463).
% 78.46/77.79  -v48(constB0,bitIndex462).
% 78.46/77.79  -v48(constB0,bitIndex461).
% 78.46/77.79  -v48(constB0,bitIndex460).
% 78.46/77.79  -v48(constB0,bitIndex459).
% 78.46/77.79  -v48(constB0,bitIndex458).
% 78.46/77.79  -v48(constB0,bitIndex457).
% 78.46/77.79  -v48(constB0,bitIndex449).
% 78.46/77.79  -v48(constB0,bitIndex448).
% 78.46/77.79  -v48(constB0,bitIndex447).
% 78.46/77.79  -v48(constB0,bitIndex446).
% 78.46/77.79  -v48(constB0,bitIndex445).
% 78.46/77.79  -v48(constB0,bitIndex444).
% 78.46/77.79  -v48(constB0,bitIndex443).
% 78.46/77.79  -v48(constB0,bitIndex442).
% 78.46/77.79  -v48(constB0,bitIndex409).
% 78.46/77.79  -v48(constB0,bitIndex408).
% 78.46/77.79  -v48(constB0,bitIndex407).
% 78.46/77.79  -v48(constB0,bitIndex406).
% 78.46/77.79  -v48(constB0,bitIndex405).
% 78.46/77.79  -v48(constB0,bitIndex404).
% 78.46/77.79  -v48(constB0,bitIndex403).
% 78.46/77.79  -v48(constB0,bitIndex402).
% 78.46/77.79  -v48(constB0,bitIndex401).
% 78.46/77.79  -v48(constB0,bitIndex400).
% 78.46/77.79  -v48(constB0,bitIndex399).
% 78.46/77.79  -v48(constB0,bitIndex398).
% 78.46/77.79  -v48(constB0,bitIndex397).
% 78.46/77.79  -v48(constB0,bitIndex396).
% 78.46/77.79  -v48(constB0,bitIndex395).
% 78.46/77.79  -v48(constB0,bitIndex394).
% 78.46/77.79  -v48(constB0,bitIndex393).
% 78.46/77.79  -v48(constB0,bitIndex392).
% 78.46/77.79  -v48(constB0,bitIndex391).
% 78.46/77.79  -v48(constB0,bitIndex390).
% 78.46/77.79  -v48(constB0,bitIndex389).
% 78.46/77.79  -v48(constB0,bitIndex388).
% 78.46/77.79  -v48(constB0,bitIndex387).
% 78.46/77.79  -v48(constB0,bitIndex386).
% 78.46/77.79  -v48(constB0,bitIndex385).
% 78.46/77.79  -v48(constB0,bitIndex384).
% 78.46/77.79  -v48(constB0,bitIndex383).
% 78.46/77.79  -v48(constB0,bitIndex382).
% 78.46/77.79  -v48(constB0,bitIndex381).
% 78.46/77.79  -v48(constB0,bitIndex380).
% 78.46/77.79  -v48(constB0,bitIndex379).
% 78.46/77.79  -v48(constB0,bitIndex378).
% 78.46/77.79  -v48(constB0,bitIndex377).
% 78.46/77.79  -v48(constB0,bitIndex376).
% 78.46/77.79  -v48(constB0,bitIndex375).
% 78.46/77.79  -v48(constB0,bitIndex374).
% 78.46/77.79  -v48(constB0,bitIndex373).
% 78.46/77.79  -v48(constB0,bitIndex372).
% 78.46/77.79  -v48(constB0,bitIndex371).
% 78.46/77.79  -v48(constB0,bitIndex370).
% 78.46/77.79  -v48(constB0,bitIndex369).
% 78.46/77.79  -v48(constB0,bitIndex368).
% 78.46/77.79  -v48(constB0,bitIndex367).
% 78.46/77.79  -v48(constB0,bitIndex366).
% 78.46/77.79  -v48(constB0,bitIndex365).
% 78.46/77.79  -v48(constB0,bitIndex364).
% 78.46/77.79  -v48(constB0,bitIndex363).
% 78.46/77.79  -v48(constB0,bitIndex362).
% 78.46/77.79  -v48(constB0,bitIndex351).
% 78.46/77.79  -v48(constB0,bitIndex350).
% 78.46/77.79  -v48(constB0,bitIndex349).
% 78.46/77.79  -v48(constB0,bitIndex348).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex115).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex114).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex113).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex112).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex111).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex110).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex109).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex101).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex100).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex99).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex98).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex97).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex96).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex95).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex94).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex61).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex60).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex59).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex58).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex57).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex56).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex55).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex54).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex53).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex52).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex51).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex50).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex49).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex48).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex47).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex46).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex45).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex44).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex43).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex42).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex41).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex40).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex39).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex38).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex37).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex36).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex35).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex34).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex33).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex32).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex31).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex30).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex29).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex28).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex27).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex26).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex25).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex24).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex23).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex22).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex21).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex20).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex19).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex18).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex17).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex16).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex15).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex14).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex3).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex2).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex1).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex0).
% 78.46/77.79  -v48(constB0,bitIndex347).
% 78.46/77.79  -v48(constB0,bitIndex346).
% 78.46/77.79  -v48(constB0,bitIndex345).
% 78.46/77.79  -v48(constB0,bitIndex344).
% 78.46/77.79  -v48(constB0,bitIndex343).
% 78.46/77.79  -v48(constB0,bitIndex342).
% 78.46/77.79  -v48(constB0,bitIndex341).
% 78.46/77.79  -v48(constB0,bitIndex333).
% 78.46/77.79  -v48(constB0,bitIndex332).
% 78.46/77.79  -v48(constB0,bitIndex331).
% 78.46/77.79  -v48(constB0,bitIndex330).
% 78.46/77.79  -v48(constB0,bitIndex329).
% 78.46/77.79  -v48(constB0,bitIndex328).
% 78.46/77.79  -v48(constB0,bitIndex327).
% 78.46/77.79  -v48(constB0,bitIndex326).
% 78.46/77.79  -v48(constB0,bitIndex293).
% 78.46/77.79  -v48(constB0,bitIndex292).
% 78.46/77.79  -v48(constB0,bitIndex291).
% 78.46/77.79  -v48(constB0,bitIndex290).
% 78.46/77.79  -v48(constB0,bitIndex289).
% 78.46/77.79  -v48(constB0,bitIndex288).
% 78.46/77.79  -v48(constB0,bitIndex287).
% 78.46/77.79  -v48(constB0,bitIndex286).
% 78.46/77.79  -v48(constB0,bitIndex285).
% 78.46/77.79  -v48(constB0,bitIndex284).
% 78.46/77.79  -v48(constB0,bitIndex283).
% 78.46/77.79  -v48(constB0,bitIndex282).
% 78.46/77.79  -v48(constB0,bitIndex281).
% 78.46/77.79  -v48(constB0,bitIndex280).
% 78.46/77.79  -v48(constB0,bitIndex279).
% 78.46/77.79  -v48(constB0,bitIndex278).
% 78.46/77.79  -v48(constB0,bitIndex277).
% 78.46/77.79  -v48(constB0,bitIndex276).
% 78.46/77.79  -v48(constB0,bitIndex275).
% 78.46/77.79  -v48(constB0,bitIndex274).
% 78.46/77.79  -v48(constB0,bitIndex273).
% 78.46/77.79  -v48(constB0,bitIndex272).
% 78.46/77.79  -v48(constB0,bitIndex271).
% 78.46/77.79  -v48(constB0,bitIndex270).
% 78.46/77.79  -v48(constB0,bitIndex269).
% 78.46/77.79  -v48(constB0,bitIndex268).
% 78.46/77.79  -v48(constB0,bitIndex267).
% 78.46/77.79  -v48(constB0,bitIndex266).
% 78.46/77.79  -v48(constB0,bitIndex265).
% 78.46/77.79  -v48(constB0,bitIndex264).
% 78.46/77.79  -v48(constB0,bitIndex263).
% 78.46/77.79  -v48(constB0,bitIndex262).
% 78.46/77.79  -v48(constB0,bitIndex261).
% 78.46/77.79  -v48(constB0,bitIndex260).
% 78.46/77.79  -v48(constB0,bitIndex259).
% 78.46/77.79  -v48(constB0,bitIndex258).
% 78.46/77.79  -v48(constB0,bitIndex257).
% 78.46/77.79  -v48(constB0,bitIndex256).
% 78.46/77.79  -v48(constB0,bitIndex255).
% 78.46/77.79  -v48(constB0,bitIndex254).
% 78.46/77.79  -v48(constB0,bitIndex253).
% 78.46/77.79  -v48(constB0,bitIndex252).
% 78.46/77.79  -v48(constB0,bitIndex251).
% 78.46/77.79  -v48(constB0,bitIndex250).
% 78.46/77.79  -v48(constB0,bitIndex249).
% 78.46/77.79  -v48(constB0,bitIndex248).
% 78.46/77.79  -v48(constB0,bitIndex247).
% 78.46/77.79  -v48(constB0,bitIndex246).
% 78.46/77.79  -v48(constB0,bitIndex235).
% 78.46/77.79  -v48(constB0,bitIndex234).
% 78.46/77.79  -v48(constB0,bitIndex233).
% 78.46/77.79  -v48(constB0,bitIndex232).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex115).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex114).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex113).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex112).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex111).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex110).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex109).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex101).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex100).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex99).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex98).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex97).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex96).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex95).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex94).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex61).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex60).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex59).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex58).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex57).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex56).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex55).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex54).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex53).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex52).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex51).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex50).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex49).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex48).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex47).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex46).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex45).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex44).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex43).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex42).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex41).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex40).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex39).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex38).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex37).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex36).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex35).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex34).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex33).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex32).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex31).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex30).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex29).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex28).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex27).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex26).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex25).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex24).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex23).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex22).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex21).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex20).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex19).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex18).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex17).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex16).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex15).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex14).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex3).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex2).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex1).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex0).
% 78.46/77.79  -v48(constB0,bitIndex231).
% 78.46/77.79  -v48(constB0,bitIndex230).
% 78.46/77.79  -v48(constB0,bitIndex229).
% 78.46/77.79  -v48(constB0,bitIndex228).
% 78.46/77.79  -v48(constB0,bitIndex227).
% 78.46/77.79  -v48(constB0,bitIndex226).
% 78.46/77.79  -v48(constB0,bitIndex225).
% 78.46/77.79  -v48(constB0,bitIndex217).
% 78.46/77.79  -v48(constB0,bitIndex216).
% 78.46/77.79  -v48(constB0,bitIndex215).
% 78.46/77.79  -v48(constB0,bitIndex214).
% 78.46/77.79  -v48(constB0,bitIndex213).
% 78.46/77.79  -v48(constB0,bitIndex212).
% 78.46/77.79  -v48(constB0,bitIndex211).
% 78.46/77.79  -v48(constB0,bitIndex210).
% 78.46/77.79  -v48(constB0,bitIndex177).
% 78.46/77.79  -v48(constB0,bitIndex176).
% 78.46/77.79  -v48(constB0,bitIndex175).
% 78.46/77.79  -v48(constB0,bitIndex174).
% 78.46/77.79  -v48(constB0,bitIndex173).
% 78.46/77.79  -v48(constB0,bitIndex172).
% 78.46/77.79  -v48(constB0,bitIndex171).
% 78.46/77.79  -v48(constB0,bitIndex170).
% 78.46/77.79  -v48(constB0,bitIndex169).
% 78.46/77.79  -v48(constB0,bitIndex168).
% 78.46/77.79  -v48(constB0,bitIndex167).
% 78.46/77.79  -v48(constB0,bitIndex166).
% 78.46/77.79  -v48(constB0,bitIndex165).
% 78.46/77.79  -v48(constB0,bitIndex164).
% 78.46/77.79  -v48(constB0,bitIndex163).
% 78.46/77.79  -v48(constB0,bitIndex162).
% 78.46/77.79  -v48(constB0,bitIndex161).
% 78.46/77.79  -v48(constB0,bitIndex160).
% 78.46/77.79  -v48(constB0,bitIndex159).
% 78.46/77.79  -v48(constB0,bitIndex158).
% 78.46/77.79  -v48(constB0,bitIndex157).
% 78.46/77.79  -v48(constB0,bitIndex156).
% 78.46/77.79  -v48(constB0,bitIndex155).
% 78.46/77.79  -v48(constB0,bitIndex154).
% 78.46/77.79  -v48(constB0,bitIndex153).
% 78.46/77.79  -v48(constB0,bitIndex152).
% 78.46/77.79  -v48(constB0,bitIndex151).
% 78.46/77.79  -v48(constB0,bitIndex150).
% 78.46/77.79  -v48(constB0,bitIndex149).
% 78.46/77.79  -v48(constB0,bitIndex148).
% 78.46/77.79  -v48(constB0,bitIndex147).
% 78.46/77.79  -v48(constB0,bitIndex146).
% 78.46/77.79  -v48(constB0,bitIndex145).
% 78.46/77.79  -v48(constB0,bitIndex144).
% 78.46/77.79  -v48(constB0,bitIndex143).
% 78.46/77.79  -v48(constB0,bitIndex142).
% 78.46/77.79  -v48(constB0,bitIndex141).
% 78.46/77.79  -v48(constB0,bitIndex140).
% 78.46/77.79  -v48(constB0,bitIndex139).
% 78.46/77.79  -v48(constB0,bitIndex138).
% 78.46/77.79  -v48(constB0,bitIndex137).
% 78.46/77.79  -v48(constB0,bitIndex136).
% 78.46/77.79  -v48(constB0,bitIndex135).
% 78.46/77.79  -v48(constB0,bitIndex134).
% 78.46/77.79  -v48(constB0,bitIndex133).
% 78.46/77.79  -v48(constB0,bitIndex132).
% 78.46/77.79  -v48(constB0,bitIndex131).
% 78.46/77.79  -v48(constB0,bitIndex130).
% 78.46/77.79  -v48(constB0,bitIndex119).
% 78.46/77.79  -v48(constB0,bitIndex118).
% 78.46/77.79  -v48(constB0,bitIndex117).
% 78.46/77.79  -v48(constB0,bitIndex116).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex115).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex114).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex113).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex112).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex111).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex110).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex109).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex101).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex100).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex99).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex98).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex97).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex96).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex95).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex94).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex61).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex60).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex59).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex58).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex57).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex56).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex55).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex54).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex53).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex52).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex51).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex50).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex49).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex48).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex47).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex46).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex45).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex44).
% 78.46/77.79  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex43).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex42).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex41).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex40).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex39).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex38).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex37).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex36).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex35).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex34).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex33).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex32).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex31).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex30).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex29).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex28).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex27).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex26).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex25).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex24).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex23).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex22).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex21).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex20).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex19).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex18).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex17).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex16).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex15).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex14).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex3).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex2).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex1).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex0).
% 78.46/77.80  -v48(constB0,bitIndex115).
% 78.46/77.80  -v48(constB0,bitIndex114).
% 78.46/77.80  -v48(constB0,bitIndex113).
% 78.46/77.80  -v48(constB0,bitIndex112).
% 78.46/77.80  -v48(constB0,bitIndex111).
% 78.46/77.80  -v48(constB0,bitIndex110).
% 78.46/77.80  -v48(constB0,bitIndex109).
% 78.46/77.80  -v48(constB0,bitIndex101).
% 78.46/77.80  -v48(constB0,bitIndex100).
% 78.46/77.80  -v48(constB0,bitIndex99).
% 78.46/77.80  -v48(constB0,bitIndex98).
% 78.46/77.80  -v48(constB0,bitIndex97).
% 78.46/77.80  -v48(constB0,bitIndex96).
% 78.46/77.80  -v48(constB0,bitIndex95).
% 78.46/77.80  -v48(constB0,bitIndex94).
% 78.46/77.80  -v48(constB0,bitIndex61).
% 78.46/77.80  -v48(constB0,bitIndex60).
% 78.46/77.80  -v48(constB0,bitIndex59).
% 78.46/77.80  -v48(constB0,bitIndex58).
% 78.46/77.80  -v48(constB0,bitIndex57).
% 78.46/77.80  -v48(constB0,bitIndex56).
% 78.46/77.80  -v48(constB0,bitIndex55).
% 78.46/77.80  -v48(constB0,bitIndex54).
% 78.46/77.80  -v48(constB0,bitIndex53).
% 78.46/77.80  -v48(constB0,bitIndex52).
% 78.46/77.80  -v48(constB0,bitIndex51).
% 78.46/77.80  -v48(constB0,bitIndex50).
% 78.46/77.80  -v48(constB0,bitIndex49).
% 78.46/77.80  -v48(constB0,bitIndex48).
% 78.46/77.80  -v48(constB0,bitIndex47).
% 78.46/77.80  -v48(constB0,bitIndex46).
% 78.46/77.80  -v48(constB0,bitIndex45).
% 78.46/77.80  -v48(constB0,bitIndex44).
% 78.46/77.80  -v48(constB0,bitIndex43).
% 78.46/77.80  -v48(constB0,bitIndex42).
% 78.46/77.80  -v48(constB0,bitIndex41).
% 78.46/77.80  -v48(constB0,bitIndex40).
% 78.46/77.80  -v48(constB0,bitIndex39).
% 78.46/77.80  -v48(constB0,bitIndex38).
% 78.46/77.80  -v48(constB0,bitIndex37).
% 78.46/77.80  -v48(constB0,bitIndex36).
% 78.46/77.80  -v48(constB0,bitIndex35).
% 78.46/77.80  -v48(constB0,bitIndex34).
% 78.46/77.80  -v48(constB0,bitIndex33).
% 78.46/77.80  -v48(constB0,bitIndex32).
% 78.46/77.80  -v48(constB0,bitIndex31).
% 78.46/77.80  -v48(constB0,bitIndex30).
% 78.46/77.80  -v48(constB0,bitIndex29).
% 78.46/77.80  -v48(constB0,bitIndex28).
% 78.46/77.80  -v48(constB0,bitIndex27).
% 78.46/77.80  -v48(constB0,bitIndex26).
% 78.46/77.80  -v48(constB0,bitIndex25).
% 78.46/77.80  -v48(constB0,bitIndex24).
% 78.46/77.80  -v48(constB0,bitIndex23).
% 78.46/77.80  -v48(constB0,bitIndex22).
% 78.46/77.80  -v48(constB0,bitIndex21).
% 78.46/77.80  -v48(constB0,bitIndex20).
% 78.46/77.80  -v48(constB0,bitIndex19).
% 78.46/77.80  -v48(constB0,bitIndex18).
% 78.46/77.80  -v48(constB0,bitIndex17).
% 78.46/77.80  -v48(constB0,bitIndex16).
% 78.46/77.80  -v48(constB0,bitIndex15).
% 78.46/77.80  -v48(constB0,bitIndex14).
% 78.46/77.80  -v48(constB0,bitIndex3).
% 78.46/77.80  -v48(constB0,bitIndex2).
% 78.46/77.80  -v48(constB0,bitIndex1).
% 78.46/77.80  -v48(constB0,bitIndex0).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex115).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex114).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex113).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex112).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex111).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex110).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex109).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex101).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex100).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex99).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex98).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex97).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex96).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex95).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex94).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex61).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex60).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex59).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex58).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex57).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex56).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex55).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex54).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex53).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex52).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex51).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex50).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex49).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex48).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex47).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex46).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex45).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex44).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex43).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex42).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex41).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex40).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex39).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex38).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex37).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex36).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex35).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex34).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex33).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex32).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex31).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex30).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex29).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex28).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex27).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex26).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex25).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex24).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex23).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex22).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex21).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex20).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex19).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex18).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex17).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex16).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex15).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex14).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex3).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex2).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex1).
% 78.46/77.80  -b0000000xxxxxxx00000000xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx000000000000000000000000000000000000000000000000xxxxxxxxxx0000(bitIndex0).
% 78.46/77.80  all VarCurr (v188(VarCurr)<->v122(VarCurr)).
% 78.46/77.80  all VarCurr B (range_115_109(B)-> (v180(VarCurr,B)<->v182(VarCurr,B))).
% 78.46/77.80  all VarCurr B (range_115_109(B)-> (v182(VarCurr,B)<->v184(VarCurr,B))).
% 78.46/77.80  all VarCurr B (range_115_109(B)-> (v184(VarCurr,B)<->v186(VarCurr,B))).
% 78.46/77.80  all B (range_115_109(B)<->bitIndex109=B|bitIndex110=B|bitIndex111=B|bitIndex112=B|bitIndex113=B|bitIndex114=B|bitIndex115=B).
% 78.46/77.80  all VarCurr (v161(VarCurr,bitIndex1)<->v163(VarCurr,bitIndex1)).
% 78.46/77.80  all VarCurr (v163(VarCurr,bitIndex1)<->v165(VarCurr,bitIndex1)).
% 78.46/77.80  all VarCurr (v165(VarCurr,bitIndex1)<->v97(VarCurr,bitIndex2)).
% 78.46/77.80  all VarCurr (v57(VarCurr,bitIndex1)<->v59(VarCurr,bitIndex1)).
% 78.46/77.80  all VarCurr (v59(VarCurr,bitIndex1)<->v61(VarCurr,bitIndex1)).
% 78.46/77.80  all VarCurr (v61(VarCurr,bitIndex1)<->v166(VarCurr,bitIndex1)).
% 78.46/77.80  all VarCurr (-v167(VarCurr)& -v172(VarCurr)& -v174(VarCurr)-> (all B (range_5_0(B)-> (v166(VarCurr,B)<->v97(VarCurr,B))))).
% 78.46/77.80  all VarCurr (v174(VarCurr)-> (all B (range_5_0(B)-> (v166(VarCurr,B)<->v176(VarCurr,B))))).
% 78.46/77.80  all VarCurr (v172(VarCurr)-> (all B (range_5_0(B)-> (v166(VarCurr,B)<->v97(VarCurr,B))))).
% 78.46/77.80  all VarCurr (v167(VarCurr)-> (all B (range_5_0(B)-> (v166(VarCurr,B)<->v169(VarCurr,B))))).
% 78.46/77.80  all VarCurr (v177(VarCurr)<-> (v178(VarCurr,bitIndex1)<->$T)& (v178(VarCurr,bitIndex0)<->$T)).
% 78.46/77.80  all VarCurr (v178(VarCurr,bitIndex0)<->v71(VarCurr)).
% 78.46/77.80  all VarCurr (v178(VarCurr,bitIndex1)<->v63(VarCurr)).
% 78.46/77.80  all VarCurr B (range_5_0(B)-> (v176(VarCurr,B)<-> -v97(VarCurr,B))).
% 103.77/103.10  all VarCurr (v174(VarCurr)<-> (v175(VarCurr,bitIndex1)<->$T)& (v175(VarCurr,bitIndex0)<->$F)).
% 103.77/103.10  all VarCurr (v175(VarCurr,bitIndex0)<->v71(VarCurr)).
% 103.77/103.10  all VarCurr (v175(VarCurr,bitIndex1)<->v63(VarCurr)).
% 103.77/103.10  all VarCurr (v172(VarCurr)<-> (v173(VarCurr,bitIndex1)<->$F)& (v173(VarCurr,bitIndex0)<->$T)).
% 103.77/103.10  all VarCurr (v173(VarCurr,bitIndex0)<->v71(VarCurr)).
% 103.77/103.10  all VarCurr (v173(VarCurr,bitIndex1)<->v63(VarCurr)).
% 103.77/103.10  all VarCurr B (range_5_0(B)-> (v169(VarCurr,B)<-> -v170(VarCurr,B))).
% 103.77/103.10  all VarCurr (v170(VarCurr,bitIndex0)<->v171(VarCurr)).
% 103.77/103.10  all VarCurr (v170(VarCurr,bitIndex1)<->v171(VarCurr)).
% 103.77/103.10  all VarCurr (v170(VarCurr,bitIndex2)<->v171(VarCurr)).
% 103.77/103.10  all VarCurr (v170(VarCurr,bitIndex3)<->v171(VarCurr)).
% 103.77/103.10  all VarCurr (v170(VarCurr,bitIndex4)<->v171(VarCurr)).
% 103.77/103.10  all VarCurr (v170(VarCurr,bitIndex5)<->v171(VarCurr)).
% 103.77/103.10  all VarCurr (v171(VarCurr)<->v99(VarCurr)).
% 103.77/103.10  all VarCurr (v167(VarCurr)<-> (v168(VarCurr,bitIndex1)<->$F)& (v168(VarCurr,bitIndex0)<->$F)).
% 103.77/103.10  all VarCurr (v168(VarCurr,bitIndex0)<->v71(VarCurr)).
% 103.77/103.10  all VarCurr (v168(VarCurr,bitIndex1)<->v63(VarCurr)).
% 103.77/103.10  all VarCurr (v161(VarCurr,bitIndex0)<->v163(VarCurr,bitIndex0)).
% 103.77/103.10  all VarCurr (v163(VarCurr,bitIndex0)<->v165(VarCurr,bitIndex0)).
% 103.77/103.10  all VarCurr (v165(VarCurr,bitIndex0)<->v97(VarCurr,bitIndex1)).
% 103.77/103.10  all VarCurr (v83(VarCurr)<->v85(VarCurr)).
% 103.77/103.10  all VarCurr (v85(VarCurr)<->v87(VarCurr)).
% 103.77/103.10  all VarCurr (v87(VarCurr)<->v89(VarCurr)).
% 103.77/103.10  all VarCurr (v89(VarCurr)<->v91(VarCurr)).
% 103.77/103.10  all VarCurr (-v91(VarCurr)<->v93(VarCurr,bitIndex0)).
% 103.77/103.10  all VarCurr (v93(VarCurr,bitIndex0)<->v95(VarCurr,bitIndex0)).
% 103.77/103.10  all VarCurr (v95(VarCurr,bitIndex0)<->v97(VarCurr,bitIndex0)).
% 103.77/103.10  all VarNext (v97(VarNext,bitIndex0)<->v142(VarNext,bitIndex0)).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v143(VarNext)-> (all B (range_5_0(B)-> (v142(VarNext,B)<->v97(VarCurr,B)))))).
% 103.77/103.10  all VarNext (v143(VarNext)-> (all B (range_5_0(B)-> (v142(VarNext,B)<->v137(VarNext,B))))).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v143(VarNext)<->v144(VarNext))).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v144(VarNext)<->v146(VarNext)&v120(VarNext))).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v146(VarNext)<->v131(VarNext))).
% 103.77/103.10  all VarCurr (v102(VarCurr,bitIndex0)<->v107(VarCurr,bitIndex0)).
% 103.77/103.10  all VarNext (v97(VarNext,bitIndex1)<->v126(VarNext,bitIndex1)).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v127(VarNext)-> (all B (range_5_0(B)-> (v126(VarNext,B)<->v97(VarCurr,B)))))).
% 103.77/103.10  all VarNext (v127(VarNext)-> (all B (range_5_0(B)-> (v126(VarNext,B)<->v137(VarNext,B))))).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (all B (range_5_0(B)-> (v137(VarNext,B)<->v135(VarCurr,B))))).
% 103.77/103.10  all VarCurr (-v138(VarCurr)-> (all B (range_5_0(B)-> (v135(VarCurr,B)<->v102(VarCurr,B))))).
% 103.77/103.10  all VarCurr (v138(VarCurr)-> (all B (range_5_0(B)-> (v135(VarCurr,B)<->$F)))).
% 103.77/103.10  all VarCurr (-v138(VarCurr)<->v99(VarCurr)).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v127(VarNext)<->v128(VarNext))).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v128(VarNext)<->v129(VarNext)&v120(VarNext))).
% 103.77/103.10  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (-v129(VarNext)<->v131(VarNext))).
% 103.77/103.11  all VarNext VarCurr (nextState(VarCurr,VarNext)-> (v131(VarNext)<->v120(VarCurr))).
% 103.77/103.11  all VarCurr (v120(VarCurr)<->v122(VarCurr)).
% 103.77/103.11  all VarCurr (v122(VarCurr)<->v124(VarCurr)).
% 103.77/103.11  all VarCurr (v124(VarCurr)<->v1(VarCurr)).
% 103.77/103.11  all VarCurr (v102(VarCurr,bitIndex1)<->v107(VarCurr,bitIndex1)).
% 103.77/103.11  all VarCurr (-v108(VarCurr)& -v110(VarCurr)& -v114(VarCurr)-> (all B (range_5_0(B)-> (v107(VarCurr,B)<->v97(VarCurr,B))))).
% 103.77/103.11  all VarCurr (v114(VarCurr)-> (all B (range_5_0(B)-> (v107(VarCurr,B)<->v116(VarCurr,B))))).
% 103.77/103.11  all VarCurr (v110(VarCurr)-> (all B (range_5_0(B)-> (v107(VarCurr,B)<->v112(VarCurr,B))))).
% 103.77/103.11  all VarCurr (v108(VarCurr)-> (all B (range_5_0(B)-> (v107(VarCurr,B)<->v97(VarCurr,B))))).
% 103.77/103.11  all VarCurr (v117(VarCurr)<-> (v118(VarCurr,bitIndex1)<->$T)& (v118(VarCurr,bitIndex0)<->$T)).
% 103.77/103.11  b11(bitIndex1).
% 103.77/103.11  b11(bitIndex0).
% 103.77/103.11  all VarCurr (v118(VarCurr,bitIndex0)<->v71(VarCurr)).
% 103.77/103.11  all VarCurr (v118(VarCurr,bitIndex1)<->v63(VarCurr)).
% 103.77/103.11  all VarCurr (v116(VarCurr,bitIndex0)<-
% 103.77/103.11  Search stopped in tp_alloc by max_mem option.
% 103.77/103.11  >$T).
% 103.77/103.11  all VarCurr ((v116(VarCurr,bitIndex5)<->v97(VarCurr,bitIndex4))& (v116(VarCurr,bitIndex4)<->v97(VarCurr,bitIndex3))& (v116(VarCurr,bitIndex3)<->v97(VarCurr,bitIndex2))& (v116(VarCurr,bitIndex2)<->v97(VarCurr,bitIndex1))& (v116(VarCurr,bitIndex1)<->v97(VarCurr,bitIndex0))).
% 103.77/103.11  all VarCurr (v114(VarCurr)<-> (v115(VarCurr,bitIndex1)<->$T)& (v115(VarCurr,bitIndex0)<->$F)).
% 103.77/103.11  b10(bitIndex1).
% 103.77/103.11  -b10(bitIndex0).
% 103.77/103.11  all VarCurr (v115(VarCurr,bitIndex0)<->v71(VarCurr)).
% 103.77/103.11  all VarCurr (v115(VarCurr,bitIndex1)<->v63(VarCurr)).
% 103.77/103.11  all VarCurr ((v112(VarCurr,bitIndex4)<->v97(VarCurr,bitIndex5))& (v112(VarCurr,bitIndex3)<->v97(VarCurr,bitIndex4))& (v112(VarCurr,bitIndex2)<->v97(VarCurr,bitIndex3))& (v112(VarCurr,bitIndex1)<->v97(VarCurr,bitIndex2))& (v112(VarCurr,bitIndex0)<->v97(VarCurr,bitIndex1))).
% 103.77/103.11  all VarCurr (v112(VarCurr,bitIndex5)<->$F).
% 103.77/103.11  all VarCurr (v110(VarCurr)<-> (v111(VarCurr,bitIndex1)<->$F)& (v111(VarCurr,bitIndex0)<->$T)).
% 103.77/103.11  -b01(bitIndex1).
% 103.77/103.11  b01(bitIndex0).
% 103.77/103.11  all VarCurr (v111(VarCurr,bitIndex0)<->v71(VarCurr)).
% 103.77/103.11  all VarCurr (v111(VarCurr,bitIndex1)<->v63(VarCurr)).
% 103.77/103.11  all B (range_5_0(B)-> (v97(constB0,B)<->$F)).
% 103.77/103.11  all B (range_5_0(B)<->bitIndex0=B|bitIndex1=B|bitIndex2=B|bitIndex3=B|bitIndex4=B|bitIndex5=B).
% 103.77/103.11  -b000000(bitIndex5).
% 103.77/103.11  -b000000(bitIndex4).
% 103.77/103.11  -b000000(bitIndex3).
% 103.77/103.11  -b000000(bitIndex2).
% 103.77/103.11  -b000000(bitIndex1).
% 103.77/103.11  -b000000(bitIndex0).
% 103.77/103.11  all VarCurr (v108(VarCurr)<-> (v109(VarCurr,bitIndex1)<->$F)& (v109(VarCurr,bitIndex0)<->$F)).
% 103.77/103.11  -b00(bitIndex1).
% 103.77/103.11  -b00(bitIndex0).
% 103.77/103.11  all VarCurr (v109(VarCurr,bitIndex0)<->v71(VarCurr)).
% 103.77/103.11  all VarCurr (v109(VarCurr,bitIndex1)<->v63(VarCurr)).
% 103.77/103.11  all VarCurr (v99(VarCurr)<->v52(VarCurr)).
% 103.77/103.11  all VarCurr (v63(VarCurr)<->v65(VarCurr)).
% 103.77/103.11  all VarCurr (v65(VarCurr)<->v67(VarCurr)).
% 103.77/103.11  all VarCurr (v67(VarCurr)<->v69(VarCurr)).
% 103.77/103.11  all VarCurr (v50(VarCurr)<->v52(VarCurr)).
% 103.77/103.11  all VarCurr (v52(VarCurr)<->v54(VarCurr)).
% 103.77/103.11  all VarCurr (v54(VarCurr)<->v17(VarCurr)).
% 103.77/103.11  all VarCurr (v15(VarCurr)<->v17(VarCurr)).
% 103.77/103.11  end_of_list.
% 103.77/103.11  
% 103.77/103.11  Search stopped in tp_alloc by max_mem option.
% 103.77/103.11  
% 103.77/103.11  ============ end of search ============
% 103.77/103.11  
% 103.77/103.11  -------------- statistics -------------
% 103.77/103.11  clauses given                  0
% 103.77/103.11  clauses generated              0
% 103.77/103.11  clauses kept                   0
% 103.77/103.11  clauses forward subsumed       0
% 103.77/103.11  clauses back subsumed          0
% 103.77/103.11  Kbytes malloced            11718
% 103.77/103.11  
% 103.77/103.11  ----------- times (seconds) -----------
% 103.77/103.11  user CPU time         27.56          (0 hr, 0 min, 27 sec)
% 103.77/103.11  system CPU time        0.01          (0 hr, 0 min, 0 sec)
% 103.77/103.11  wall-clock time      103             (0 hr, 1 min, 43 sec)
% 103.77/103.11  
% 103.77/103.11  Process 21553 finished Wed Jul 27 06:51:42 2022
% 103.77/103.11  Otter interrupted
% 103.77/103.11  PROOF NOT FOUND
%------------------------------------------------------------------------------