-rw-r--r-- 56391 nttcompiler-20220411/512/ref/ntt.c
// auto-generated; do not edit #include "ntt_512.h" typedef int16_t int16; typedef int32_t int32; static const int16 qdata_7681[] = { #define q qdata[0] 7681, #define qinvscaledzeta_4_1 qdata[1] -28865, #define qinvscaledzeta_4_3 qdata[2] 28865, #define qinvscaledzeta_8_1 qdata[3] -16425, #define qinvscaledzeta_8_7 qdata[4] -10350, #define qround32 qdata[5] 4, #define scaledzeta_4_1 qdata[6] -3777, #define scaledzeta_4_3 qdata[7] 3777, #define scaledzeta_8_1 qdata[8] -3625, #define scaledzeta_8_7 qdata[9] -3182, #define qinvscaledzeta_pow_8_1 (qdata+10) -9,-16425,-28865,10350, #define qinvscaledzeta_pow_8_7 (qdata+14) -9,-10350,28865,16425, #define qinvscaledzeta_pow_16_1 (qdata+18) -9,-4974,-16425,-7244, #define qinvscaledzeta_pow_16_15 (qdata+22) -9,-4496,-10350,-14744, #define qinvscaledzeta_pow_32_1 (qdata+26) -9,-20315,-4974,18242,-16425,18191,-7244,-11655,-28865,20870,14744,-22593,10350,828,4496,23754, #define qinvscaledzeta_pow_32_31 (qdata+42) -9,-23754,-4496,-828,-10350,22593,-14744,-20870,28865,11655,7244,-18191,16425,-18242,4974,20315, #define qinvscaledzeta_pow_64_1 (qdata+58) -9,-10972,-20315,23489,-4974,25597,18242,-2816,-16425,-19351,18191,-3114,-7244,-9488,-11655,19394, #define qinvscaledzeta_pow_64_63 (qdata+74) -9,-7491,-23754,-15307,-4496,-15750,-828,-5759,-10350,22568,22593,-20469,-14744,31177,-20870,26382, #define qinvscaledzeta_pow_128_1 (qdata+90) -9,-14846,-10972,-21066,-20315,-24581,23489,-23242,-4974,-4505,25597,-26279,18242,21467,-2816,15998,-16425,-4983,-19351,14624,18191,-2073,-3114,20674,-7244,-21399,-9488,6246,-11655,-29103,19394,-5930,-28865,-23668,-26382,-28839,20870,6536,-31177,16279,14744,29428,20469,29667,-22593,9215,-22568,-11783,10350,-14121,5759,-5913,828,-1724,15750,11792,4496,25093,15307,26228,23754,-21766,7491,-6817, #define qinvscaledzeta_pow_128_127 (qdata+154) -9,6817,-7491,21766,-23754,-26228,-15307,-25093,-4496,-11792,-15750,1724,-828,5913,-5759,14121,-10350,11783,22568,-9215,22593,-29667,-20469,-29428,-14744,-16279,31177,-6536,-20870,28839,26382,23668,28865,5930,-19394,29103,11655,-6246,9488,21399,7244,-20674,3114,2073,-18191,-14624,19351,4983,16425,-15998,2816,-21467,-18242,26279,-25597,4505,4974,23242,-23489,24581,20315,21066,10972,14846, #define qinvscaledzeta_pow_256_1 (qdata+218) -9,-32252,-14846,-19317,-10972,8472,-21066,-3456,-20315,16655,-24581,12611,23489,-12073,-23242,29871,-4974,6032,-4505,10409,25597,24266,-26279,17030,18242,10478,21467,11962,-2816,-26262,15998,-17602,-16425,-22124,-4983,-26220,-19351,-8908,14624,32738,18191,13575,-2073,27132,-3114,24573,20674,27201,-7244,12269,-21399,-16092,-9488,-15810,6246,15358,-11655,-15768,-29103,24052,19394,-26441,-5930,-1689, #define qinvscaledzeta_pow_256_255 (qdata+282) -9,13541,6817,-5529,-7491,26663,21766,-4693,-23754,13933,-26228,8558,-15307,-21953,-25093,-22875,-4496,-7508,-11792,-30136,-15750,26800,1724,17303,-828,2722,5913,-12013,-5759,30426,14121,3558,-10350,-24743,11783,-21860,22568,-32329,-9215,9360,22593,-7415,-29667,25946,-20469,-21868,-29428,-25511,-14744,1869,-16279,14351,31177,2193,-6536,17440,-20870,24718,28839,-23225,26382,9855,23668,-9599, #define qinvscaledzeta_pow_512_1 (qdata+346) 
-9,-32124,-32252,10179,-14846,6766,-19317,16638,-10972,-23549,8472,-17082,-21066,-15145,-3456,31518,-20315,-6297,16655,-12261,-24581,-11885,12611,30938,23489,28805,-12073,26783,-23242,-14718,29871,5708,-4974,15111,6032,-29453,-4505,12449,10409,529,25597,-32004,24266,2952,-26279,18003,17030,24931,18242,-1007,10478,-4624,21467,17636,11962,14360,-2816,15972,-26262,16715,15998,4573,-17602,-14539, #define qinvscaledzeta_pow_512_3 (qdata+410) -9,10179,-19317,-23549,-21066,31518,16655,-11885,23489,26783,29871,15111,-4505,529,24266,18003,18242,-4624,11962,15972,15998,-14539,-22124,-17013,-19351,17167,32738,2858,-2073,-16885,24573,-20759,-7244,-8831,-16092,-4565,6246,20119,-15768,1851,19394,-2526,-1689,-16186,-23668,-9633,23225,14650,20870,5990,-2193,-5452,16279,-22764,25511,-26330,20469,25648,7415,1860,9215,16791,21860,4880, #define qinvscaledzeta_pow_512_509 (qdata+474) -9,-23182,-5529,-14223,21766,23933,13933,-23523,-15307,26766,-22875,-22943,-11792,9513,26800,4317,-828,-20435,-12013,-3396,14121,27730,-24743,11996,22568,-25179,9360,2739,-29667,5299,-21868,25563,-14744,-16083,14351,-1715,-6536,2807,24718,-16006,26382,-17491,-9599,3524,5930,-10333,-24052,-10802,11655,12790,15810,30255,21399,10145,-27201,20238,3114,7304,-13575,14129,-14624,-19215,26220,802, #define qinvscaledzeta_pow_512_511 (qdata+538) -9,-6510,13541,-23182,6817,24214,-5529,-24232,-7491,-14223,26663,27858,21766,26621,-4693,23933,-23754,29394,13933,14138,-26228,-23523,8558,-23984,-15307,-13686,-21953,26766,-25093,-9650,-22875,-20964,-4496,-22943,-7508,-27243,-11792,-18506,-30136,9513,-15750,-24530,26800,947,1724,4317,17303,29718,-828,23882,2722,-20435,5913,-10495,-12013,8839,-5759,-3396,30426,15221,14121,26288,3558,27730, #define scaledzeta_pow_8_1 (qdata+602) -3593,-3625,-3777,3182, #define scaledzeta_pow_8_7 (qdata+606) -3593,-3182,3777,3625, #define scaledzeta_pow_16_1 (qdata+610) -3593,2194,-3625,-1100, #define scaledzeta_pow_16_15 (qdata+614) -3593,3696,-3182,-2456, #define scaledzeta_pow_32_1 (qdata+618) -3593,1701,2194,834,-3625,2319,-1100,121,-3777,1414,2456,2495,3182,2876,-3696,2250, #define scaledzeta_pow_32_31 (qdata+634) -3593,-2250,3696,-2876,-3182,-2495,-2456,-1414,3777,-121,1100,-2319,3625,-834,-2194,-1701, #define scaledzeta_pow_64_1 (qdata+650) -3593,3364,1701,-1599,2194,2557,834,-2816,-3625,617,2319,2006,-1100,-1296,121,1986, #define scaledzeta_pow_64_63 (qdata+666) -3593,2237,-2250,-1483,3696,3706,-2876,1921,-3182,2088,-2495,-1525,-2456,1993,-1414,2830, #define scaledzeta_pow_128_1 (qdata+682) -3593,514,3364,438,1701,2555,-1599,-1738,2194,103,2557,1881,834,-549,-2816,638,-3625,-1399,617,-1760,2319,2535,2006,3266,-1100,-1431,-1296,3174,121,3153,1986,-810,-3777,2956,-2830,-679,1414,2440,-1993,-3689,2456,2804,1525,3555,2495,1535,-2088,-7,3182,-1321,-1921,-1305,2876,-3772,-3706,3600,-3696,-2043,1483,-396,2250,-2310,-2237,1887, #define scaledzeta_pow_128_127 (qdata+746) -3593,-1887,2237,2310,-2250,396,-1483,2043,3696,-3600,3706,3772,-2876,1305,1921,1321,-3182,7,2088,-1535,-2495,-3555,-1525,-2804,-2456,3689,1993,-2440,-1414,679,2830,-2956,3777,810,-1986,-3153,-121,-3174,1296,1431,1100,-3266,-2006,-2535,-2319,1760,-617,1399,3625,-638,2816,549,-834,-1881,-2557,-103,-2194,1738,1599,-2555,-1701,-438,-3364,-514, #define scaledzeta_pow_256_1 (qdata+810) 
-3593,-1532,514,-373,3364,-3816,438,-3456,1701,783,2555,2883,-1599,727,-1738,-2385,2194,-2160,103,-2391,2557,2762,1881,-2426,834,3310,-549,-1350,-2816,1386,638,-194,-3625,404,-1399,-3692,617,-2764,-1760,-1054,2319,1799,2535,-3588,2006,1533,3266,2113,-1100,-2579,-1431,-1756,-1296,1598,3174,-2,121,-3480,3153,-2572,1986,2743,-810,2919, #define scaledzeta_pow_256_255 (qdata+874) -3593,2789,-1887,-921,2237,-1497,2310,-2133,-2250,-915,396,1390,-1483,3135,2043,-859,3696,2732,-3600,-1464,3706,2224,3772,-2665,-2876,1698,1305,2835,1921,730,1321,486,-3182,3417,7,-3428,2088,-3145,-1535,1168,-2495,-3831,-3555,-3750,-1525,660,-2804,2649,-2456,3405,3689,-1521,1993,1681,-2440,1056,-1414,1166,679,-2233,2830,2175,-2956,-1919, #define scaledzeta_pow_512_1 (qdata+938) -3593,-1404,-1532,451,514,-402,-373,1278,3364,-509,-3816,-3770,438,-2345,-3456,-226,1701,-1689,783,-1509,2555,2963,2883,1242,-1599,1669,727,2719,-1738,642,-2385,-436,2194,3335,-2160,1779,103,3745,-2391,17,2557,2812,2762,-1144,1881,83,-2426,-1181,834,-1519,3310,3568,-549,-796,-1350,2072,-2816,-2460,1386,2891,638,-2083,-194,-715, #define scaledzeta_pow_512_3 (qdata+1002) -3593,451,-373,-509,438,-226,783,2963,-1599,2719,-2385,3335,103,17,2762,83,834,3568,-1350,-2460,638,-715,404,1931,617,1295,-1054,-2262,2535,2059,1533,-791,-1100,-1151,-1756,-2005,3174,151,-3480,-3781,1986,-3550,2919,-2874,2956,-929,2233,1338,1414,2918,-1681,692,-3689,-236,-2649,3366,1525,1072,3831,-188,1535,-3177,3428,-3312, #define scaledzeta_pow_512_509 (qdata+1066) -3593,370,-921,1649,2310,893,-915,2589,-1483,3214,-859,1121,-3600,-3287,2224,-2339,-2876,-2515,2835,-1348,1321,2130,3417,-2340,2088,-3163,1168,1203,-3555,3763,660,3547,-2456,1837,-1521,-179,-2440,-777,1166,3450,2830,429,-1919,1476,810,-3677,2572,-1586,-121,1526,-1598,-2001,1431,1441,-2113,-3314,-2006,3208,-1799,-2767,1760,-3343,3692,-222, #define scaledzeta_pow_512_511 (qdata+1130) -3593,658,2789,370,-1887,-3434,-921,-3752,2237,1649,-1497,2258,2310,3581,-2133,893,-2250,3794,-915,826,396,2589,1390,592,-1483,-2422,3135,3214,2043,-434,-859,-2532,3696,1121,2732,2965,-3600,2998,-1464,-3287,3706,1070,2224,-589,3772,-2339,-2665,2070,-2876,2378,1698,-2515,1305,-2815,2835,-2937,1921,-1348,730,-3723,1321,1712,486,2130, } ; static const int16 qdata_10753[] = { // q 10753, // qinvscaledzeta_4_1 -27359, // qinvscaledzeta_4_3 27359, // qinvscaledzeta_8_1 -408, // qinvscaledzeta_8_7 -1956, // qround32 3, // scaledzeta_4_1 -223, // scaledzeta_4_3 223, // scaledzeta_8_1 3688, // scaledzeta_8_7 4188, // qinvscaledzeta_pow_8_1 -6,-408,-27359,1956, // qinvscaledzeta_pow_8_7 -6,-1956,27359,408, // qinvscaledzeta_pow_16_1 -6,-20856,-408,-21094, // qinvscaledzeta_pow_16_15 -6,-10093,-1956,-28517, // qinvscaledzeta_pow_32_1 -6,-9508,-20856,-29449,-408,18345,-21094,-7033,-27359,-16072,28517,-12476,1956,-28224,10093,16090, // qinvscaledzeta_pow_32_31 -6,-16090,-10093,28224,-1956,12476,-28517,16072,27359,7033,21094,-18345,408,29449,20856,9508, // qinvscaledzeta_pow_64_1 -6,-3639,-9508,25543,-20856,829,-29449,-17675,-408,18363,18345,7429,-21094,-10001,-7033,-4547, // qinvscaledzeta_pow_64_63 -6,28103,-16090,3925,-10093,7228,28224,11683,-1956,-23056,12476,14731,-28517,26518,16072,14847, // qinvscaledzeta_pow_128_1 
-6,-5619,-3639,-12378,-9508,15736,25543,23007,-20856,-27152,829,-22209,-29449,-20490,-17675,22532,-408,16724,18363,22623,18345,5766,7429,-31369,-21094,15840,-10001,19326,-7033,3407,-4547,2316,-27359,6381,-14847,8441,-16072,-6924,-26518,-4589,28517,12707,-14731,-15864,-12476,31656,23056,24098,1956,-31217,-11683,-24269,-28224,-5126,-7228,20198,10093,-573,-3925,-14341,16090,23781,-28103,-23812, // qinvscaledzeta_pow_128_127 -6,23812,28103,-23781,-16090,14341,3925,573,-10093,-20198,7228,5126,28224,24269,11683,31217,-1956,-24098,-23056,-31656,12476,15864,14731,-12707,-28517,4589,26518,6924,16072,-8441,14847,-6381,27359,-2316,4547,-3407,7033,-19326,10001,-15840,21094,31369,-7429,-5766,-18345,-22623,-18363,-16724,408,-22532,17675,20490,29449,22209,-829,27152,20856,-23007,-25543,-15736,9508,12378,3639,5619, // qinvscaledzeta_pow_256_1 -6,-17412,-5619,2017,-3639,24976,-12378,24702,-9508,-31558,15736,1316,25543,-31418,23007,-512,-20856,-13268,-27152,22044,829,8801,-22209,-12214,-29449,11141,-20490,-17096,-17675,32076,22532,17571,-408,13012,16724,4090,18363,-30546,22623,16614,18345,-17248,5766,22666,7429,-7856,-31369,31235,-21094,28541,15840,-30351,-10001,-177,19326,-31887,-7033,25555,3407,-31290,-4547,-13579,2316,-2395, // qinvscaledzeta_pow_256_255 -6,4175,23812,7326,28103,17352,-23781,-28200,-16090,11555,14341,6978,3925,-1627,573,780,-10093,32271,-20198,7356,7228,29364,5126,27895,28224,-609,24269,21892,11683,-7795,31217,-18845,-1956,29407,-24098,-7716,-23056,-719,-31656,-8246,12476,-26238,15864,11842,14731,1932,-12707,-11726,-28517,4394,4589,2066,26518,-11300,6924,-24037,16072,969,-8441,14999,14847,-11854,-6381,-19844, // qinvscaledzeta_pow_512_1 -6,-13500,-17412,32070,-5619,5120,2017,11952,-3639,1609,24976,9374,-12378,-23836,24702,-8289,-9508,-22471,-31558,25482,15736,-8935,1316,32351,25543,19661,-31418,8295,23007,-25652,-512,-19863,-20856,6917,-13268,-28712,-27152,20899,22044,4083,829,951,8801,29370,-22209,24641,-12214,12976,-29449,-22215,11141,-29626,-20490,30467,-17096,13158,-17675,-24129,32076,7880,22532,-30053,17571,-8758, // qinvscaledzeta_pow_512_3 -6,32070,2017,1609,-12378,-8289,-31558,-8935,25543,8295,-512,6917,-27152,4083,8801,24641,-29449,-29626,-17096,-24129,22532,-8758,13012,15328,18363,-27329,16614,1767,5766,4797,-7856,-14780,-21094,-23160,-30351,-1834,19326,17394,25555,9691,-4547,3699,-2395,20661,6381,-23026,-14999,19320,-16072,27572,11300,-16925,-4589,31583,11726,31924,-14731,-15511,26238,22313,31656,20100,7716,31497, // qinvscaledzeta_pow_512_509 -6,-13164,7326,29541,-23781,12384,11555,-9343,3925,23788,780,-18881,-20198,19856,29364,-12336,28224,16273,21892,-30144,31217,-8027,29407,14280,-23056,6095,-8246,14237,15864,-8161,1932,-21161,-28517,7192,2066,6314,6924,12653,969,-3998,14847,21593,-19844,-3035,-2316,-21770,31290,18875,7033,-12531,177,-11446,-15840,-4717,-31235,-31467,-7429,8807,17248,31058,-22623,23269,-4090,13987, // qinvscaledzeta_pow_512_511 -6,6716,4175,-13164,23812,-26292,7326,-12098,28103,29541,17352,15127,-23781,-7289,-28200,12384,-16090,-29151,11555,-20173,14341,-9343,6978,-22483,3925,61,-1627,23788,573,24025,780,-7722,-10093,-18881,32271,23093,-20198,-24330,7356,19856,7228,29827,29364,15517,5126,-12336,27895,-4248,28224,26286,-609,16273,24269,-5729,21892,-7801,11683,-30144,-7795,4967,31217,5369,-18845,-8027, // scaledzeta_pow_8_1 1018,3688,-223,-4188, // scaledzeta_pow_8_7 1018,4188,223,-3688, // scaledzeta_pow_16_1 1018,-376,3688,-3686, // scaledzeta_pow_16_15 1018,-2413,4188,-357, // scaledzeta_pow_32_1 
1018,-3364,-376,4855,3688,425,-3686,2695,-223,-3784,357,-2236,-4188,4544,2413,730, // scaledzeta_pow_32_31 1018,-730,-2413,-4544,4188,2236,-357,3784,223,-2695,3686,-425,-3688,-4855,376,3364, // scaledzeta_pow_64_1 1018,-5175,-3364,2503,-376,1341,4855,-4875,3688,-2629,425,-4347,-3686,3823,2695,-4035, // scaledzeta_pow_64_63 1018,5063,-730,341,-2413,-3012,-4544,-5213,4188,1520,2236,1931,-357,918,3784,4095, // scaledzeta_pow_128_1 1018,3085,-5175,2982,-3364,-4744,2503,-4129,-376,-2576,1341,-193,4855,3062,-4875,4,3688,2388,-2629,-4513,425,4742,-4347,2935,-3686,-544,3823,-2178,2695,847,-4035,268,-223,-1299,-4095,-1287,-3784,-4876,-918,3091,357,-4189,-1931,4616,-2236,2984,-1520,-3550,-4188,-1009,5213,-205,4544,-4102,3012,2790,2413,-1085,-341,-2565,730,-4379,-5063,-1284, // scaledzeta_pow_128_127 1018,1284,5063,4379,-730,2565,341,1085,-2413,-2790,-3012,4102,-4544,205,-5213,1009,4188,3550,1520,-2984,2236,-4616,1931,4189,-357,-3091,918,4876,3784,1287,4095,1299,223,-268,4035,-847,-2695,2178,-3823,544,3686,-2935,4347,-4742,-425,4513,2629,-2388,-3688,-4,4875,-3062,-4855,193,-1341,2576,376,4129,-2503,4744,3364,-2982,5175,-3085, // scaledzeta_pow_256_1 1018,5116,3085,-3615,-5175,400,2982,3198,-3364,2234,-4744,-4828,2503,326,-4129,-512,-376,1068,-2576,-4580,1341,3169,-193,-2998,4855,-635,3062,-4808,-4875,-2740,4,675,3688,-1324,2388,5114,-2629,5294,-4513,-794,425,-864,4742,-886,-4347,336,2935,-2045,-3686,-3715,-544,4977,3823,-2737,-2178,3441,2695,467,847,454,-4035,-779,268,2213, // scaledzeta_pow_256_255 1018,1615,1284,2206,5063,5064,4379,472,-730,-5341,2565,-4286,341,2981,1085,-1268,-2413,-3057,-2790,-2884,-3012,-1356,4102,-3337,-4544,5023,205,-636,-5213,909,1009,-2973,4188,2271,3550,-1572,1520,1841,-2984,970,2236,-4734,-4616,578,1931,-116,4189,1586,-357,-2774,-3091,-1006,918,-5156,4876,4123,3784,-567,1287,151,4095,1458,1299,2684, // scaledzeta_pow_512_1 1018,-3260,5116,-1722,3085,5120,-3615,3760,-5175,73,400,4254,2982,2788,3198,-2657,-3364,569,2234,1930,-4744,-2279,-4828,5215,2503,-4403,326,1639,-4129,5068,-512,-5015,-376,-4859,1068,-40,-2576,4003,-4580,-4621,1341,2487,3169,-2374,-193,2625,-2998,4784,4855,825,-635,2118,3062,-2813,-4808,-4250,-4875,-2113,-2740,-4408,4,-1893,675,458, // scaledzeta_pow_512_3 1018,-1722,-3615,73,2982,-2657,2234,-2279,2503,1639,-512,-4859,-2576,-4621,3169,2625,4855,2118,-4808,-2113,4,458,-1324,-1056,-2629,-5313,-794,-4889,4742,5309,336,-4540,-3686,-2680,4977,5334,-2178,-2062,467,5083,-4035,-5005,2213,693,-1299,-3570,-151,-1160,-3784,-3148,5156,-1053,3091,4447,-1586,1204,-1931,-663,4734,4393,2984,-2428,1572,-2807, // scaledzeta_pow_512_509 1018,5268,2206,1381,4379,-4000,-5341,1409,341,5356,-1268,3135,-2790,-4720,-1356,-4144,-4544,2449,-636,2624,1009,-3419,2271,1992,1520,3535,970,-1635,-4616,-2529,-116,-3241,-357,3096,-1006,-854,4876,4973,-567,1122,4095,-4519,2684,1573,-268,1782,-454,-2117,-2695,-3827,2737,-2230,544,2963,2045,-2283,4347,2151,864,-4782,4513,-4891,-5114,-2909, // scaledzeta_pow_512_511 1018,-3524,1615,5268,1284,4428,2206,-834,5063,1381,5064,279,4379,2439,472,-4000,-730,-2015,-5341,3891,2565,1409,-4286,2605,341,573,2981,5356,1085,-2087,-1268,-554,-2413,3135,-3057,3125,-2790,-778,-2884,-4720,-3012,-3453,-1356,-355,4102,-4144,-3337,-152,-4544,-3410,5023,2449,205,-97,-636,1927,-5213,2624,909,-1689,1009,-4359,-2973,-3419, } ; static int16 add(int16 x,int16 y) { return x+y; } static int16 sub(int16 x,int16 y) { return x-y; } static int16 mullo(int16 x,int16 y) { return x*y; } static int16 mulhi(int16 x,int16 y) { return (x*(int32)y)>>16; } static int16 mulhrs(int16 
x,int16 y) { return (x*(int32)y+16384)>>15; }
static int16 mulmod_scaled(int16 x,int16 y,int16 qinvy,const int16 *qdata) { int16 b = mulhi(x,y); int16 d = mullo(x,qinvy); int16 e = mulhi(d,q); return sub(b,e); }
static int16 reduce(int16 x,const int16 *qdata) { int16 y = mulhrs(x,qround32); y = mullo(y,q); return sub(x,y); }

// ----- codegen pass 1
//
// startntt 512
// startbatch 512
//
// ----- PRECONDITIONS
// physical_map (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
//
// transform size 512
//
// transform indexing [0, 1, 2, 3, 4, 5, 6, 7, 8]
//
// transforms per batch 1
//
// batch indexing []
//
// total batch size 512
//
//
// modulus x^512-1 pos 0:512 q 7681,10753 bound 512*(5629,5800)
// assertranges ...
//
//
// ----- LAYER 1
//
//
// butterflies()
// butterfly 0 256 1 256 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
//
//
// ----- POSTCONDITIONS AFTER LAYER 1
//
// transform size 512
//
// transform indexing [0, 1, 2, 3, 4, 5, 6, 7, 8]
//
// transforms per batch 1
//
// batch indexing []
//
// total batch size 512
//
//
// modulus x^256-1 pos 0:256 q 7681,10753 bound 256*(11258,11600)
// assertranges ...
//
//
// modulus x^256+1 pos 256:512 q 7681,10753 bound 256*(11258,11600)
// assertranges ...
//
//
// ----- LAYER 2
//
//
// reduce_ifreverse(0,64,1)
// reduce_ifreverse 0 64 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
//
//
// reduce_ifreverse(256,320,1)
// reduce_ifreverse 256 320 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
//
//
// butterflies()
// butterfly 0 128 1 128 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 256 384 1 128 4 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
//
//
// ----- POSTCONDITIONS AFTER LAYER 2
//
// transform size 512
//
// transform indexing [0, 1, 2, 3, 4, 5, 6, 7, 8]
//
// transforms per batch 1
//
// batch indexing []
//
// total batch size 512
//
//
// modulus x^128-1 pos 0:128 q 7681,10753 bound 128*(22516,23200)
// assertranges ...
//
//
// modulus x^128+1 pos 128:256 q 7681,10753 bound 128*(22516,23200)
// assertranges ...
//
//
// modulus x^128-zeta4 pos 256:384 q 7681,10753 bound 128*(15747,17016)
// assertranges ...
//
//
// modulus x^128+zeta4 pos 384:512 q 7681,10753 bound 128*(15747,17016)
// assertranges ...
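// ----- editor's note: an illustration, not part of the generated code.
// mulmod_scaled() above is a Montgomery-style multiplication. For each
// constant zeta the tables store y = zeta*2^16 mod q next to
// qinvy = y*q^-1 mod 2^16, so d = mullo(x,qinvy) satisfies d*q == x*y
// (mod 2^16); the two mulhi results then differ by exactly
// (x*y - d*q)/2^16, which is congruent to x*zeta mod q and stays
// comfortably inside int16 range. The equivalent single-expression form
// (mulmod_scaled_spec is our name, for exposition only):
static int16 mulmod_scaled_spec(int16 x,int16 y,int16 qinvy,const int16 *qdata)
{
  int32 d = mullo(x,qinvy); // d*q matches x*y in the bottom 16 bits
  return (x*(int32)y-d*q)>>16; // same value as mulmod_scaled(x,y,qinvy,qdata)
}
// Similarly, reduce() uses qround32 ~ 2^15/q (4 for 7681, 3 for 10753),
// so mulhrs(x,qround32) ~ round(x/q) and subtracting round(x/q)*q leaves
// a representative in (-q,q) for inputs within the asserted ranges.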
//
//
// ----- LAYER 3
//
//
// reduce_ifforward(64,128,1)
// reduce_ifforward 64 128 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
//
//
// butterflies()
// butterfly 0 64 1 64 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 128 192 1 64 4 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 256 320 1 64 8 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 384 448 1 64 8 7 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
//
//
// twists()
// reduce 0 64 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 64 128 1 128 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 128 192 1 256 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 192 256 1 256 255 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 256 320 1 512 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 320 384 1 512 509 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 384 448 1 512 511 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 448 512 1 512 3 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
//
//
// physical_permute(3,6)
// physical_permute (3, 6) (0, 1, 2, 3, 4, 5, 6, 7, 8) () (0, 1, 2, 6, 4, 5, 3, 7, 8) ()
//
//
// fold(256)
// physical_unmap (0, 1, 2, 6, 4, 5, 3, 7, 8) ()
// physical_map (0, 1, 2, 6, 4, 5, 3, 7) (8,)
//
//
// fold(128)
// physical_unmap (0, 1, 2, 6, 4, 5, 3, 7) (8,)
// physical_map (0, 1, 2, 6, 4, 5, 3) (7, 8)
//
//
// fold(64)
// physical_unmap (0, 1, 2, 6, 4, 5, 3) (7, 8)
// physical_map (0, 1, 2, 6, 4, 5) (3, 7, 8)
//
//
// nextbatch()
// stopbatch 512
// startbatch 512
//
//
// halfbatch()
// physical_unmap (0, 1, 2, 6, 4, 5) (3, 7, 8)
// stopbatch 512
// doublereps
// startbatch 256
// physical_map (0, 1, 2, 6, 4, 5) (3, 7)
//
//
// halfbatch()
// physical_unmap (0, 1, 2, 6, 4, 5) (3, 7)
// stopbatch 256
// doublereps
// startbatch 128
// physical_map (0, 1, 2, 6, 4, 5) (3,)
//
//
// ----- POSTCONDITIONS AFTER LAYER 3
//
// transform size 64
//
// transform indexing [0, 1, 2, 6, 4, 5]
//
// transforms per batch 2
//
// batch indexing [3]
//
// total batch size 128
//
//
// modulus x^64-1 pos 0:64 q 7681,10753 bound 1*(5629,5827) 1*(5629,7613) 1*(5629,7666) 1*(5629,7264) 1*(5629,7639) 1*(5629,7591) 1*(5629,7291) 1*(5629,7204) ...
// assertranges ...
//
//
// ----- LAYER 4
//
//
// butterflies()
// butterfly 0 32 1 32 1 0 (0, 1, 2, 6, 4, 5) (3,)
//
//
// ----- POSTCONDITIONS AFTER LAYER 4
//
// transform size 64
//
// transform indexing [0, 1, 2, 6, 4, 5]
//
// transforms per batch 2
//
// batch indexing [3]
//
// total batch size 128
//
//
// modulus x^32-1 pos 0:32 q 7681,10753 bound 1*(11258,13035) 1*(11258,14721) 1*(11258,14855) 1*(11258,14877) 1*(11258,14753) 1*(11258,15282) 1*(11258,14641) 1*(11258,14402) ...
// assertranges ...
//
//
// modulus x^32+1 pos 32:64 q 7681,10753 bound 1*(11258,13035) 1*(11258,14721) 1*(11258,14855) 1*(11258,14877) 1*(11258,14753) 1*(11258,15282) 1*(11258,14641) 1*(11258,14402) ...
// assertranges ...
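// ----- editor's note: an illustration, not part of the generated code.
// A "twist b e 1 n k" op above multiplies f[b+j] by zeta_n^(k*j) for
// j = 0,1,...,e-b-1: substituting y = zeta_n^k*x carries
// Z_q[x]/(x^m - zeta_n^(k*m)) to Z_q[y]/(y^m - 1), so after the layer-3
// twists all eight length-64 blocks share the modulus x^64-1 and the
// fold() steps can move their positions into batch dimensions. A generic
// sketch of one twist, where zpow/qinvzpow stand for one of the
// scaledzeta_pow_*/qinvscaledzeta_pow_* table pairs (our naming):
static void twist_sketch(int16 *f,long long b,long long m,
                         const int16 *zpow,const int16 *qinvzpow,
                         const int16 *qdata)
{
  for (long long j = 0;j != m;++j)
    f[b+j] = mulmod_scaled(f[b+j],zpow[j],qinvzpow[j],qdata);
}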
//
//
// ----- LAYER 5
//
//
// butterflies()
// butterfly 0 16 1 16 1 0 (0, 1, 2, 6, 4, 5) (3,)
// butterfly 32 48 1 16 4 1 (0, 1, 2, 6, 4, 5) (3,)
//
//
// twists()
// reduce 0 16 1 (0, 1, 2, 6, 4, 5) (3,)
// twist 16 32 1 32 1 (0, 1, 2, 6, 4, 5) (3,)
// twist 32 48 1 64 1 (0, 1, 2, 6, 4, 5) (3,)
// twist 48 64 1 64 63 (0, 1, 2, 6, 4, 5) (3,)
//
//
// physical_permute(0,1,2,5)
// physical_permute (0, 1, 2, 5) (0, 1, 2, 6, 4, 5) (3,) (1, 2, 5, 6, 4, 0) (3,)
//
//
// fold(32)
// physical_unmap (1, 2, 5, 6, 4, 0) (3,)
// physical_map (1, 2, 5, 6, 4) (0, 3)
//
//
// fold(16)
// physical_unmap (1, 2, 5, 6, 4) (0, 3)
// physical_map (1, 2, 5, 6) (0, 3, 4)
//
//
// ----- POSTCONDITIONS AFTER LAYER 5
//
// transform size 16
//
// transform indexing [1, 2, 5, 6]
//
// transforms per batch 8
//
// batch indexing [0, 3, 4]
//
// total batch size 128
//
//
// modulus x^16-1 pos 0:16 q 7681,10753 bound 1*(5629,5802) 1*(5629,6967) 1*(5629,6418) 1*(5629,7585) 1*(5629,7020) 1*(5629,6328) 1*(5629,7033) 1*(5629,6954) ...
// assertranges ...
//
//
// ----- LAYER 6
//
//
// butterflies()
// butterfly 0 8 1 8 1 0 (1, 2, 5, 6) (0, 3, 4)
//
//
// physical_permute(1,2,4)
// physical_permute (1, 2, 4) (1, 2, 5, 6) (0, 3, 4) (2, 4, 5, 6) (0, 3, 1)
//
//
// nextbatch()
// stopbatch 128
// startbatch 128
//
//
// ----- POSTCONDITIONS AFTER LAYER 6
//
// transform size 16
//
// transform indexing [2, 4, 5, 6]
//
// transforms per batch 8
//
// batch indexing [0, 3, 1]
//
// total batch size 128
//
//
// modulus x^8-1 pos 0:8 q 7681,10753 bound 1*(11258,12424) 1*(11258,14021) 1*(11258,12488) 1*(11258,14310) 1*(11258,14290) 1*(11258,13681) 1*(11258,13574) 1*(11258,13540)
// assertranges ...
//
//
// modulus x^8+1 pos 8:16 q 7681,10753 bound 1*(11258,12424) 1*(11258,14021) 1*(11258,12488) 1*(11258,14310) 1*(11258,14290) 1*(11258,13681) 1*(11258,13574) 1*(11258,13540)
// assertranges ...
//
//
// ----- LAYER 7
//
//
// butterflies()
// butterfly 0 4 1 4 1 0 (2, 4, 5, 6) (0, 3, 1)
// butterfly 8 12 1 4 4 1 (2, 4, 5, 6) (0, 3, 1)
//
//
// twists()
// reduce 0 4 1 (2, 4, 5, 6) (0, 3, 1)
// twist 4 8 1 8 1 (2, 4, 5, 6) (0, 3, 1)
// twist 8 12 1 16 1 (2, 4, 5, 6) (0, 3, 1)
// twist 12 16 1 16 15 (2, 4, 5, 6) (0, 3, 1)
//
//
// physical_permute(2,6)
// physical_permute (2, 6) (2, 4, 5, 6) (0, 3, 1) (6, 4, 5, 2) (0, 3, 1)
//
//
// fold(8)
// physical_unmap (6, 4, 5, 2) (0, 3, 1)
// physical_map (6, 4, 5) (0, 1, 2, 3)
//
//
// fold(4)
// physical_unmap (6, 4, 5) (0, 1, 2, 3)
// physical_map (6, 4) (0, 1, 2, 3, 5)
//
//
// ----- POSTCONDITIONS AFTER LAYER 7
//
// transform size 4
//
// transform indexing [6, 4]
//
// transforms per batch 32
//
// batch indexing [0, 1, 2, 3, 5]
//
// total batch size 128
//
//
// modulus x^4-1 pos 0:4 q 7681,10753 bound 1*(5629,5800) 1*(5629,6935) 1*(5629,6521) 1*(5629,7156)
// assertranges ...
//
//
// ----- LAYER 8
//
//
// butterflies()
// butterfly 0 2 1 2 1 0 (6, 4) (0, 1, 2, 3, 5)
//
//
// ----- POSTCONDITIONS AFTER LAYER 8
//
// transform size 4
//
// transform indexing [6, 4]
//
// transforms per batch 32
//
// batch indexing [0, 1, 2, 3, 5]
//
// total batch size 128
//
//
// modulus x^2-1 pos 0:2 q 7681,10753 bound 1*(11258,12321) 1*(11258,14091)
// assertranges ...
//
//
// modulus x^2+1 pos 2:4 q 7681,10753 bound 1*(11258,12321) 1*(11258,14091)
// assertranges ...
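// ----- editor's note: an illustration, not part of the generated code.
// Each "butterfly a b 1 len n k" op above (with b = a+len) is one radix-2
// split: for the len pairs (f[j],f[j+len]) starting at j = a it computes
// (f0 + z*f1, f0 - z*f1) with z = zeta_n^k (k = 0 means z = 1 and no
// multiplication is emitted), taking x^(2*len) - zeta_n^(2*k) to the two
// halves x^len - zeta_n^k and x^len + zeta_n^k. A generic sketch
// (butterfly_sketch is our name, for exposition only):
static void butterfly_sketch(int16 *f,long long a,long long len,
                             int16 z,int16 qinvz,const int16 *qdata)
{
  for (long long j = a;j != a+len;++j) {
    int16 f0 = f[j];
    int16 f1 = mulmod_scaled(f[j+len],z,qinvz,qdata);
    f[j] = add(f0,f1);
    f[j+len] = sub(f0,f1);
  }
}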
//
//
// ----- LAYER 9
//
//
// butterflies()
// butterfly 0 1 1 1 1 0 (6, 4) (0, 1, 2, 3, 5)
// butterfly 2 3 1 1 4 1 (6, 4) (0, 1, 2, 3, 5)
//
//
// ----- POSTCONDITIONS AFTER LAYER 9
//
// transform size 4
//
// transform indexing [6, 4]
//
// transforms per batch 32
//
// batch indexing [0, 1, 2, 3, 5]
//
// total batch size 128
//
//
// modulus x^1-1 pos 0:1 q 7681,10753 bound 1*(22516,26412)
// assertranges ...
//
//
// modulus x^1+1 pos 1:2 q 7681,10753 bound 1*(22516,26412)
// assertranges ...
//
//
// modulus x^1-zeta4 pos 2:3 q 7681,10753 bound 1*(15747,17745)
// assertranges ...
//
//
// modulus x^1+zeta4 pos 3:4 q 7681,10753 bound 1*(15747,17745)
// assertranges ...
// stopbatch 128
// physical_unmap (6, 4) (0, 1, 2, 3, 5)
// stopntt 512

// ----- codegen pass 2
//
// startntt 512
// startbatch 512
// physical_map (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 0 256 1 256 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// reduce_ifreverse 0 64 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// reduce_ifreverse 256 320 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 0 128 1 128 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 256 384 1 128 4 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// reduce_ifforward 64 128 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 0 64 1 64 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 128 192 1 64 4 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 256 320 1 64 8 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// butterfly 384 448 1 64 8 7 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// reduce 0 64 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 64 128 1 128 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 128 192 1 256 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 192 256 1 256 255 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 256 320 1 512 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 320 384 1 512 509 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 384 448 1 512 511 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// twist 448 512 1 512 3 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
// physical_permute (3, 6) (0, 1, 2, 3, 4, 5, 6, 7, 8) () (0, 1, 2, 6, 4, 5, 3, 7, 8) ()
// stopbatch 512
// doublereps
// doublereps
// startbatch 128
// physical_unmap (0, 1, 2, 6, 4, 5, 3, 7, 8) ()
// physical_map (0, 1, 2, 6, 4, 5) (3,)
// butterfly 0 32 1 32 1 0 (0, 1, 2, 6, 4, 5) (3,)
// butterfly 0 16 1 16 1 0 (0, 1, 2, 6, 4, 5) (3,)
// butterfly 32 48 1 16 4 1 (0, 1, 2, 6, 4, 5) (3,)
// reduce 0 16 1 (0, 1, 2, 6, 4, 5) (3,)
// twist 16 32 1 32 1 (0, 1, 2, 6, 4, 5) (3,)
// twist 32 48 1 64 1 (0, 1, 2, 6, 4, 5) (3,)
// twist 48 64 1 64 63 (0, 1, 2, 6, 4, 5) (3,)
// physical_permute (0, 1, 2, 5) (0, 1, 2, 6, 4, 5) (3,) (1, 2, 5, 6, 4, 0) (3,)
// physical_unmap (1, 2, 5, 6, 4, 0) (3,)
// physical_map (1, 2, 5, 6) (0, 3, 4)
// butterfly 0 8 1 8 1 0 (1, 2, 5, 6) (0, 3, 4)
// physical_permute (1, 2, 4) (1, 2, 5, 6) (0, 3, 4) (2, 4, 5, 6) (0, 3, 1)
// stopbatch 128
// startbatch 128
// butterfly 0 4 1 4 1 0 (2, 4, 5, 6) (0, 3, 1)
// butterfly 8 12 1 4 4 1 (2, 4, 5, 6) (0, 3, 1)
// reduce 0 4 1 (2, 4, 5, 6) (0, 3, 1)
// twist 4 8 1 8 1 (2, 4, 5, 6) (0, 3, 1)
// twist 8 12 1 16 1 (2, 4, 5, 6) (0, 3, 1)
// twist 12 16 1 16 15 (2, 4, 5, 6) (0, 3, 1)
// physical_permute (2, 6) (2, 4, 5, 6) (0, 3, 1) (6, 4, 5, 2) (0, 3, 1)
// physical_unmap (6, 4, 5, 2) (0, 3, 1)
// physical_map (6, 4) (0, 1, 2, 3, 5)
// butterfly 0 2 1 2 1 0 (6, 4) (0, 1, 2, 3, 5)
// butterfly 0 1 1 1 1 0 (6, 4) (0, 1, 2, 3, 5)
// butterfly 2 3 1 1 4 1 (6, 4) (0, 1, 2, 3, 5)
// stopbatch 128
// physical_unmap (6, 4) (0, 1, 2, 3, 5)
// stopntt 512

// startntt 512
static void ntt512(int16 *f,long long reps,const int16 *qdata)
{
  // startbatch 512
  for (long long r = 0;r < reps;++r) {
    // physical_map (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
#define F(t,v) f[((((v)>>0)&1)<<0)+((((v)>>1)&1)<<1)+((((v)>>2)&1)<<2)+((((v)>>3)&1)<<3)+((((v)>>4)&1)<<4)+((((v)>>5)&1)<<5)+((((v)>>6)&1)<<6)+((((v)>>7)&1)<<7)+((((v)>>8)&1)<<8)]
    // butterfly 0 256 1 256 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 256;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+256);
        F(t,j) = add(f0,f1);
        F(t,j+256) = sub(f0,f1);
      }
    }
    // reduce_ifreverse 0 64 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    // reduce_ifreverse 256 320 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    // butterfly 0 128 1 128 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 128;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+128);
        F(t,j) = add(f0,f1);
        F(t,j+128) = sub(f0,f1);
      }
    }
    // butterfly 256 384 1 128 4 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 256;j != 384;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+128);
        f1 = mulmod_scaled(f1,scaledzeta_4_1,qinvscaledzeta_4_1,qdata);
        F(t,j) = add(f0,f1);
        F(t,j+128) = sub(f0,f1);
      }
    }
    // reduce_ifforward 64 128 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 64;j != 128;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // butterfly 0 64 1 64 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+64);
        F(t,j) = add(f0,f1);
        F(t,j+64) = sub(f0,f1);
      }
    }
    // butterfly 128 192 1 64 4 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 128;j != 192;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+64);
        f1 = mulmod_scaled(f1,scaledzeta_4_1,qinvscaledzeta_4_1,qdata);
        F(t,j) = add(f0,f1);
        F(t,j+64) = sub(f0,f1);
      }
    }
    // butterfly 256 320 1 64 8 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 256;j != 320;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+64);
        f1 = mulmod_scaled(f1,scaledzeta_8_1,qinvscaledzeta_8_1,qdata);
        F(t,j) = add(f0,f1);
        F(t,j+64) = sub(f0,f1);
      }
    }
    // butterfly 384 448 1 64 8 7 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 384;j != 448;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+64);
        f1 = mulmod_scaled(f1,scaledzeta_8_7,qinvscaledzeta_8_7,qdata);
        F(t,j) = add(f0,f1);
        F(t,j+64) = sub(f0,f1);
      }
    }
    // reduce 0 64 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // twist 64 128 1 128 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,64+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_128_1[j],qinvscaledzeta_pow_128_1[j],qdata);
        F(t,64+1*j) = f0;
      }
    }
    // twist 128 192 1 256 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,128+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_256_1[j],qinvscaledzeta_pow_256_1[j],qdata);
        F(t,128+1*j) = f0;
      }
    }
    // twist 192 256 1 256 255 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,192+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_256_255[j],qinvscaledzeta_pow_256_255[j],qdata);
        F(t,192+1*j) = f0;
      }
    }
    // twist 256 320 1 512 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,256+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_512_1[j],qinvscaledzeta_pow_512_1[j],qdata);
        F(t,256+1*j) = f0;
      }
    }
    // twist 320 384 1 512 509 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,320+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_512_509[j],qinvscaledzeta_pow_512_509[j],qdata);
        F(t,320+1*j) = f0;
      }
    }
    // twist 384 448 1 512 511 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,384+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_512_511[j],qinvscaledzeta_pow_512_511[j],qdata);
        F(t,384+1*j) = f0;
      }
    }
    // twist 448 512 1 512 3 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,448+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_512_3[j],qinvscaledzeta_pow_512_3[j],qdata);
        F(t,448+1*j) = f0;
      }
    }
    // physical_permute (3, 6) (0, 1, 2, 3, 4, 5, 6, 7, 8) () (0, 1, 2, 6, 4, 5, 3, 7, 8) ()
    {
      int16 rearrange[512];
      for (long long t = 0;t < 1;++t)
        for (long long j = 0;j < 512;++j)
          rearrange[t*512+j] = F(t,j);
#undef F
#define F(t,v) f[((((v)>>0)&1)<<0)+((((v)>>1)&1)<<1)+((((v)>>2)&1)<<2)+((((v)>>3)&1)<<6)+((((v)>>4)&1)<<4)+((((v)>>5)&1)<<5)+((((v)>>6)&1)<<3)+((((v)>>7)&1)<<7)+((((v)>>8)&1)<<8)]
      for (long long t = 0;t < 1;++t)
        for (long long j = 0;j < 512;++j)
          F(t,j) = rearrange[t*512+j];
    }
    // stopbatch 512
    f += 512;
  }
  f -= 512*reps;
  // doublereps
  reps *= 2;
  // doublereps
  reps *= 2;
  // startbatch 128
  for (long long r = 0;r < reps;++r) {
    // physical_unmap (0, 1, 2, 6, 4, 5, 3, 7, 8) ()
#undef F
    // physical_map (0, 1, 2, 6, 4, 5) (3,)
#define F(t,v) f[((((t)>>0)&1)<<3)+((((v)>>0)&1)<<0)+((((v)>>1)&1)<<1)+((((v)>>2)&1)<<2)+((((v)>>3)&1)<<6)+((((v)>>4)&1)<<4)+((((v)>>5)&1)<<5)]
    // butterfly 0 32 1 32 1 0 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 32;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+32);
        F(t,j) = add(f0,f1);
        F(t,j+32) = sub(f0,f1);
      }
    }
    // butterfly 0 16 1 16 1 0 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+16);
        F(t,j) = add(f0,f1);
        F(t,j+16) = sub(f0,f1);
      }
    }
    // butterfly 32 48 1 16 4 1 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 32;j != 48;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+16);
        f1 = mulmod_scaled(f1,scaledzeta_4_1,qinvscaledzeta_4_1,qdata);
        F(t,j) = add(f0,f1);
        F(t,j+16) = sub(f0,f1);
      }
    }
    // reduce 0 16 1 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // twist 16 32 1 32 1 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;++j) {
        int16 f0 = F(t,16+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_32_1[j],qinvscaledzeta_pow_32_1[j],qdata);
        F(t,16+1*j) = f0;
      }
    }
    // twist 32 48 1 64 1 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;++j) {
        int16 f0 = F(t,32+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_64_1[j],qinvscaledzeta_pow_64_1[j],qdata);
        F(t,32+1*j) = f0;
      }
    }
    // twist 48 64 1 64 63 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;++j) {
        int16 f0 = F(t,48+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_64_63[j],qinvscaledzeta_pow_64_63[j],qdata);
        F(t,48+1*j) = f0;
      }
    }
    // physical_permute (0, 1, 2, 5) (0, 1, 2, 6, 4, 5) (3,) (1, 2, 5, 6, 4, 0) (3,)
    {
      int16 rearrange[128];
      for (long long t = 0;t < 2;++t)
        for (long long j = 0;j < 64;++j)
          rearrange[t*64+j] = F(t,j);
#undef F
#define F(t,v) f[((((t)>>0)&1)<<3)+((((v)>>0)&1)<<1)+((((v)>>1)&1)<<2)+((((v)>>2)&1)<<5)+((((v)>>3)&1)<<6)+((((v)>>4)&1)<<4)+((((v)>>5)&1)<<0)]
      for (long long t = 0;t < 2;++t)
        for (long long j = 0;j < 64;++j)
          F(t,j) = rearrange[t*64+j];
    }
    // physical_unmap (1, 2, 5, 6, 4, 0) (3,)
#undef F
    // physical_map (1, 2, 5, 6) (0, 3, 4)
#define F(t,v) f[((((t)>>0)&1)<<0)+((((t)>>1)&1)<<3)+((((t)>>2)&1)<<4)+((((v)>>0)&1)<<1)+((((v)>>1)&1)<<2)+((((v)>>2)&1)<<5)+((((v)>>3)&1)<<6)]
    // butterfly 0 8 1 8 1 0 (1, 2, 5, 6) (0, 3, 4)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 8;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+8);
        F(t,j) = add(f0,f1);
        F(t,j+8) = sub(f0,f1);
      }
    }
    // physical_permute (1, 2, 4) (1, 2, 5, 6) (0, 3, 4) (2, 4, 5, 6) (0, 3, 1)
    {
      int16 rearrange[128];
      for (long long t = 0;t < 8;++t)
        for (long long j = 0;j < 16;++j)
          rearrange[t*16+j] = F(t,j);
#undef F
#define F(t,v) f[((((t)>>0)&1)<<0)+((((t)>>1)&1)<<3)+((((t)>>2)&1)<<1)+((((v)>>0)&1)<<2)+((((v)>>1)&1)<<4)+((((v)>>2)&1)<<5)+((((v)>>3)&1)<<6)]
      for (long long t = 0;t < 8;++t)
        for (long long j = 0;j < 16;++j)
          F(t,j) = rearrange[t*16+j];
    }
    // stopbatch 128
    f += 128;
  }
  f -= 128*reps;
  // startbatch 128
  for (long long r = 0;r < reps;++r) {
    // butterfly 0 4 1 4 1 0 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+4);
        F(t,j) = add(f0,f1);
        F(t,j+4) = sub(f0,f1);
      }
    }
    // butterfly 8 12 1 4 4 1 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 8;j != 12;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+4);
        f1 = mulmod_scaled(f1,scaledzeta_4_1,qinvscaledzeta_4_1,qdata);
        F(t,j) = add(f0,f1);
        F(t,j+4) = sub(f0,f1);
      }
    }
    // reduce 0 4 1 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // twist 4 8 1 8 1 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;++j) {
        int16 f0 = F(t,4+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_8_1[j],qinvscaledzeta_pow_8_1[j],qdata);
        F(t,4+1*j) = f0;
      }
    }
    // twist 8 12 1 16 1 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;++j) {
        int16 f0 = F(t,8+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_16_1[j],qinvscaledzeta_pow_16_1[j],qdata);
        F(t,8+1*j) = f0;
      }
    }
    // twist 12 16 1 16 15 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;++j) {
        int16 f0 = F(t,12+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_16_15[j],qinvscaledzeta_pow_16_15[j],qdata);
        F(t,12+1*j) = f0;
      }
    }
    // physical_permute (2, 6) (2, 4, 5, 6) (0, 3, 1) (6, 4, 5, 2) (0, 3, 1)
    {
      int16 rearrange[128];
      for (long long t = 0;t < 8;++t)
        for (long long j = 0;j < 16;++j)
          rearrange[t*16+j] = F(t,j);
#undef F
#define F(t,v) f[((((t)>>0)&1)<<0)+((((t)>>1)&1)<<3)+((((t)>>2)&1)<<1)+((((v)>>0)&1)<<6)+((((v)>>1)&1)<<4)+((((v)>>2)&1)<<5)+((((v)>>3)&1)<<2)]
      for (long long t = 0;t < 8;++t)
        for (long long j = 0;j < 16;++j)
          F(t,j) = rearrange[t*16+j];
    }
    // physical_unmap (6, 4, 5, 2) (0, 3, 1)
#undef F
    // physical_map (6, 4) (0, 1, 2, 3, 5)
#define F(t,v) f[((((t)>>0)&1)<<0)+((((t)>>1)&1)<<1)+((((t)>>2)&1)<<2)+((((t)>>3)&1)<<3)+((((t)>>4)&1)<<5)+((((v)>>0)&1)<<6)+((((v)>>1)&1)<<4)]
    // butterfly 0 2 1 2 1 0 (6, 4) (0, 1, 2, 3, 5)
    for (long long t = 0;t < 32;++t) {
      for (long long j = 0;j != 2;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+2);
        F(t,j) = add(f0,f1);
        F(t,j+2) = sub(f0,f1);
      }
    }
    // butterfly 0 1 1 1 1 0 (6, 4) (0, 1, 2, 3, 5)
    for (long long t = 0;t < 32;++t) {
      for (long long j = 0;j != 1;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+1);
        F(t,j) = add(f0,f1);
        F(t,j+1) = sub(f0,f1);
      }
    }
    // butterfly 2 3 1 1 4 1 (6, 4) (0, 1, 2, 3, 5)
    for (long long t = 0;t < 32;++t) {
      for (long long j = 2;j != 3;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+1);
        f1 = mulmod_scaled(f1,scaledzeta_4_1,qinvscaledzeta_4_1,qdata);
        F(t,j) = add(f0,f1);
        F(t,j+1) = sub(f0,f1);
      }
    }
    // stopbatch 128
    f += 128;
  }
  f -= 128*reps;
  // physical_unmap (6, 4) (0, 1, 2, 3, 5)
#undef F
  // stopntt 512
}

void ntt_512_7681(int16 *f,long long reps) { ntt512(f,reps,qdata_7681); }
void ntt_512_10753(int16 *f,long long reps) { ntt512(f,reps,qdata_10753); }

// inv stopntt 512
static void invntt512(int16 *f,long long reps,const int16 *qdata)
{
  reps *= 4;
  // inv physical_unmap (6, 4) (0, 1, 2, 3, 5)
#define F(t,v) f[((((t)>>0)&1)<<0)+((((t)>>1)&1)<<1)+((((t)>>2)&1)<<2)+((((t)>>3)&1)<<3)+((((t)>>4)&1)<<5)+((((v)>>0)&1)<<6)+((((v)>>1)&1)<<4)]
  // inv stopbatch 128
  for (long long r = 0;r < reps;++r) {
    // inv butterfly 2 3 1 1 4 1 (6, 4) (0, 1, 2, 3, 5)
    for (long long t = 0;t < 32;++t) {
      for (long long j = 2;j != 3;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+1);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        f1 = mulmod_scaled(f1,scaledzeta_4_3,qinvscaledzeta_4_3,qdata);
        F(t,j+1) = f1;
      }
    }
    // inv butterfly 0 1 1 1 1 0 (6, 4) (0, 1, 2, 3, 5)
    for (long long t = 0;t < 32;++t) {
      for (long long j = 0;j != 1;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+1);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+1) = f1;
      }
    }
    // inv butterfly 0 2 1 2 1 0 (6, 4) (0, 1, 2, 3, 5)
    for (long long t = 0;t < 32;++t) {
      for (long long j = 0;j != 2;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+2);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+2) = f1;
      }
    }
    // inv physical_map (6, 4) (0, 1, 2, 3, 5)
#undef F
    // inv physical_unmap (6, 4, 5, 2) (0, 3, 1)
#define F(t,v) f[((((t)>>0)&1)<<0)+((((t)>>1)&1)<<3)+((((t)>>2)&1)<<1)+((((v)>>0)&1)<<6)+((((v)>>1)&1)<<4)+((((v)>>2)&1)<<5)+((((v)>>3)&1)<<2)]
    // inv physical_permute (2, 6) (2, 4, 5, 6) (0, 3, 1) (6, 4, 5, 2) (0, 3, 1)
    {
      int16 rearrange[128];
      for (long long t = 0;t < 8;++t)
        for (long long j = 0;j < 16;++j)
          rearrange[t*16+j] = F(t,j);
#undef F
#define F(t,v) f[((((t)>>0)&1)<<0)+((((t)>>1)&1)<<3)+((((t)>>2)&1)<<1)+((((v)>>0)&1)<<2)+((((v)>>1)&1)<<4)+((((v)>>2)&1)<<5)+((((v)>>3)&1)<<6)]
      for (long long t = 0;t < 8;++t)
        for (long long j = 0;j < 16;++j)
          F(t,j) = rearrange[t*16+j];
    }
    // inv twist 12 16 1 16 15 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;++j) {
        int16 f0 = F(t,12+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_16_1[j],qinvscaledzeta_pow_16_1[j],qdata);
        F(t,12+1*j) = f0;
      }
    }
    // inv twist 8 12 1 16 1 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;++j) {
        int16 f0 = F(t,8+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_16_15[j],qinvscaledzeta_pow_16_15[j],qdata);
        F(t,8+1*j) = f0;
      }
    }
    // inv twist 4 8 1 8 1 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;++j) {
        int16 f0 = F(t,4+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_8_7[j],qinvscaledzeta_pow_8_7[j],qdata);
        F(t,4+1*j) = f0;
      }
    }
    // inv reduce 0 4 1 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // inv butterfly 8 12 1 4 4 1 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 8;j != 12;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+4);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        f1 = mulmod_scaled(f1,scaledzeta_4_3,qinvscaledzeta_4_3,qdata);
        F(t,j+4) = f1;
      }
    }
    // inv butterfly 0 4 1 4 1 0 (2, 4, 5, 6) (0, 3, 1)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 4;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+4);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+4) = f1;
      }
    }
    // inv startbatch 128
    f += 128;
  }
  f -= 128*reps;
  // inv stopbatch 128
  for (long long r = 0;r < reps;++r) {
    // inv physical_permute (1, 2, 4) (1, 2, 5, 6) (0, 3, 4) (2, 4, 5, 6) (0, 3, 1)
    {
      int16 rearrange[128];
      for (long long t = 0;t < 8;++t)
        for (long long j = 0;j < 16;++j)
          rearrange[t*16+j] = F(t,j);
#undef F
#define F(t,v) f[((((t)>>0)&1)<<0)+((((t)>>1)&1)<<3)+((((t)>>2)&1)<<4)+((((v)>>0)&1)<<1)+((((v)>>1)&1)<<2)+((((v)>>2)&1)<<5)+((((v)>>3)&1)<<6)]
      for (long long t = 0;t < 8;++t)
        for (long long j = 0;j < 16;++j)
          F(t,j) = rearrange[t*16+j];
    }
    // inv butterfly 0 8 1 8 1 0 (1, 2, 5, 6) (0, 3, 4)
    for (long long t = 0;t < 8;++t) {
      for (long long j = 0;j != 8;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+8);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+8) = f1;
      }
    }
    // inv physical_map (1, 2, 5, 6) (0, 3, 4)
#undef F
    // inv physical_unmap (1, 2, 5, 6, 4, 0) (3,)
#define F(t,v) f[((((t)>>0)&1)<<3)+((((v)>>0)&1)<<1)+((((v)>>1)&1)<<2)+((((v)>>2)&1)<<5)+((((v)>>3)&1)<<6)+((((v)>>4)&1)<<4)+((((v)>>5)&1)<<0)]
    // inv physical_permute (0, 1, 2, 5) (0, 1, 2, 6, 4, 5) (3,) (1, 2, 5, 6, 4, 0) (3,)
    {
      int16 rearrange[128];
      for (long long t = 0;t < 2;++t)
        for (long long j = 0;j < 64;++j)
          rearrange[t*64+j] = F(t,j);
#undef F
#define F(t,v) f[((((t)>>0)&1)<<3)+((((v)>>0)&1)<<0)+((((v)>>1)&1)<<1)+((((v)>>2)&1)<<2)+((((v)>>3)&1)<<6)+((((v)>>4)&1)<<4)+((((v)>>5)&1)<<5)]
      for (long long t = 0;t < 2;++t)
        for (long long j = 0;j < 64;++j)
          F(t,j) = rearrange[t*64+j];
    }
    // inv twist 48 64 1 64 63 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;++j) {
        int16 f0 = F(t,48+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_64_1[j],qinvscaledzeta_pow_64_1[j],qdata);
        F(t,48+1*j) = f0;
      }
    }
    // inv twist 32 48 1 64 1 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;++j) {
        int16 f0 = F(t,32+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_64_63[j],qinvscaledzeta_pow_64_63[j],qdata);
        F(t,32+1*j) = f0;
      }
    }
    // inv twist 16 32 1 32 1 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;++j) {
        int16 f0 = F(t,16+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_32_31[j],qinvscaledzeta_pow_32_31[j],qdata);
        F(t,16+1*j) = f0;
      }
    }
    // inv reduce 0 16 1 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // inv butterfly 32 48 1 16 4 1 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 32;j != 48;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+16);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        f1 = mulmod_scaled(f1,scaledzeta_4_3,qinvscaledzeta_4_3,qdata);
        F(t,j+16) = f1;
      }
    }
    // inv butterfly 0 16 1 16 1 0 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 16;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+16);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+16) = f1;
      }
    }
    // inv butterfly 0 32 1 32 1 0 (0, 1, 2, 6, 4, 5) (3,)
    for (long long t = 0;t < 2;++t) {
      for (long long j = 0;j != 32;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+32);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+32) = f1;
      }
    }
    // inv physical_map (0, 1, 2, 6, 4, 5) (3,)
#undef F
    // inv physical_unmap (0, 1, 2, 6, 4, 5, 3, 7, 8) ()
#define F(t,v) f[((((v)>>0)&1)<<0)+((((v)>>1)&1)<<1)+((((v)>>2)&1)<<2)+((((v)>>3)&1)<<6)+((((v)>>4)&1)<<4)+((((v)>>5)&1)<<5)+((((v)>>6)&1)<<3)+((((v)>>7)&1)<<7)+((((v)>>8)&1)<<8)]
    // inv startbatch 128
    f += 128;
  }
  f -= 128*reps;
  // inv doublereps
  reps /= 2;
  // inv doublereps
  reps /= 2;
  // inv stopbatch 512
  for (long long r = 0;r < reps;++r) {
    // inv physical_permute (3, 6) (0, 1, 2, 3, 4, 5, 6, 7, 8) () (0, 1, 2, 6, 4, 5, 3, 7, 8) ()
    {
      int16 rearrange[512];
      for (long long t = 0;t < 1;++t)
        for (long long j = 0;j < 512;++j)
          rearrange[t*512+j] = F(t,j);
#undef F
#define F(t,v) f[((((v)>>0)&1)<<0)+((((v)>>1)&1)<<1)+((((v)>>2)&1)<<2)+((((v)>>3)&1)<<3)+((((v)>>4)&1)<<4)+((((v)>>5)&1)<<5)+((((v)>>6)&1)<<6)+((((v)>>7)&1)<<7)+((((v)>>8)&1)<<8)]
      for (long long t = 0;t < 1;++t)
        for (long long j = 0;j < 512;++j)
          F(t,j) = rearrange[t*512+j];
    }
    // inv twist 448 512 1 512 3 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,448+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_512_509[j],qinvscaledzeta_pow_512_509[j],qdata);
        F(t,448+1*j) = f0;
      }
    }
    // inv twist 384 448 1 512 511 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,384+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_512_1[j],qinvscaledzeta_pow_512_1[j],qdata);
        F(t,384+1*j) = f0;
      }
    }
    // inv twist 320 384 1 512 509 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,320+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_512_3[j],qinvscaledzeta_pow_512_3[j],qdata);
        F(t,320+1*j) = f0;
      }
    }
    // inv twist 256 320 1 512 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,256+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_512_511[j],qinvscaledzeta_pow_512_511[j],qdata);
        F(t,256+1*j) = f0;
      }
    }
    // inv twist 192 256 1 256 255 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,192+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_256_1[j],qinvscaledzeta_pow_256_1[j],qdata);
        F(t,192+1*j) = f0;
      }
    }
    // inv twist 128 192 1 256 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,128+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_256_255[j],qinvscaledzeta_pow_256_255[j],qdata);
        F(t,128+1*j) = f0;
      }
    }
    // inv twist 64 128 1 128 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;++j) {
        int16 f0 = F(t,64+1*j);
        f0 = mulmod_scaled(f0,scaledzeta_pow_128_127[j],qinvscaledzeta_pow_128_127[j],qdata);
        F(t,64+1*j) = f0;
      }
    }
    // inv reduce 0 64 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // inv butterfly 384 448 1 64 8 7 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 384;j != 448;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+64);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        f1 = mulmod_scaled(f1,scaledzeta_8_1,qinvscaledzeta_8_1,qdata);
        F(t,j+64) = f1;
      }
    }
    // inv butterfly 256 320 1 64 8 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 256;j != 320;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+64);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        f1 = mulmod_scaled(f1,scaledzeta_8_7,qinvscaledzeta_8_7,qdata);
        F(t,j+64) = f1;
      }
    }
    // inv butterfly 128 192 1 64 4 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 128;j != 192;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+64);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        f1 = mulmod_scaled(f1,scaledzeta_4_3,qinvscaledzeta_4_3,qdata);
        F(t,j+64) = f1;
      }
    }
    // inv butterfly 0 64 1 64 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+64);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+64) = f1;
      }
    }
    // inv reduce_ifforward 64 128 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    // inv butterfly 256 384 1 128 4 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 256;j != 384;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+128);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        f1 = mulmod_scaled(f1,scaledzeta_4_3,qinvscaledzeta_4_3,qdata);
        F(t,j+128) = f1;
      }
    }
    // inv butterfly 0 128 1 128 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 128;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+128);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+128) = f1;
      }
    }
    // inv reduce_ifreverse 256 320 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 256;j != 320;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // inv reduce_ifreverse 0 64 1 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 64;j += 1) {
        int16 f0 = F(t,j);
        f0 = reduce(f0,qdata);
        F(t,j) = f0;
      }
    }
    // inv butterfly 0 256 1 256 1 0 (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
    for (long long t = 0;t < 1;++t) {
      for (long long j = 0;j != 256;j += 1) {
        int16 f0 = F(t,j);
        int16 f1 = F(t,j+256);
        F(t,j) = add(f0,f1);
        f1 = sub(f0,f1);
        F(t,j+256) = f1;
      }
    }
    // inv physical_map (0, 1, 2, 3, 4, 5, 6, 7, 8) ()
#undef F
    // inv startbatch 512
    f += 512;
  }
  f -= 512*reps;
  // inv startntt 512
}

void ntt_512_7681_inv(int16 *f,long long reps) { invntt512(f,reps,qdata_7681); }
void ntt_512_10753_inv(int16 *f,long long reps) { invntt512(f,reps,qdata_10753); }
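// ----- editor's note: a usage illustration, not part of the generated
// file. Each exported function transforms reps consecutive batches of 512
// int16 coefficients in place; outputs are only bounded as in the
// assertranges comments above, not fully reduced mod q. A minimal call
// (example_roundtrip is our name, for exposition only):
static void example_roundtrip(void)
{
  int16 f[512] = {1}; // f(x) = 1, well within the asserted input bound
  ntt_512_7681(f,1); // forward NTT mod 7681 on one batch
  ntt_512_7681_inv(f,1); // inverse NTT mod 7681 on the same batch
  // The 2^16 Montgomery scale cancels because it is built into the
  // twiddle tables, but the nine unscaled butterfly levels each
  // contribute a factor 2, so the round trip appears to return
  // 512*f mod 7681; a caller would fold 1/512 mod q into later steps.
}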