{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 2181,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 1.5151515151515153e-05, "loss": 7.9297, "step": 1},
    {"epoch": 0.0, "learning_rate": 3.0303030303030306e-05, "loss": 7.8359, "step": 2},
    {"epoch": 0.0, "learning_rate": 4.545454545454546e-05, "loss": 7.2969, "step": 3},
    {"epoch": 0.0, "learning_rate": 6.060606060606061e-05, "loss": 5.9219, "step": 4},
    {"epoch": 0.0, "learning_rate": 7.575757575757576e-05, "loss": 5.5078, "step": 5},
    {"epoch": 0.0, "learning_rate": 9.090909090909092e-05, "loss": 5.3281, "step": 6},
    {"epoch": 0.0, "learning_rate": 0.00010606060606060606, "loss": 5.0938, "step": 7},
    {"epoch": 0.0, "learning_rate": 0.00012121212121212122, "loss": 4.8203, "step": 8},
    {"epoch": 0.0, "learning_rate": 0.00013636363636363637, "loss": 4.7656, "step": 9},
    {"epoch": 0.0, "learning_rate": 0.00015151515151515152, "loss": 4.6328, "step": 10},
    {"epoch": 0.01, "learning_rate": 0.00016666666666666666, "loss": 4.4766, "step": 11},
    {"epoch": 0.01, "learning_rate": 0.00018181818181818183, "loss": 4.375, "step": 12},
    {"epoch": 0.01, "learning_rate": 0.00019696969696969695, "loss": 4.4062, "step": 13},
    {"epoch": 0.01, "learning_rate": 0.00021212121212121213, "loss": 4.3047, "step": 14},
    {"epoch": 0.01, "learning_rate": 0.00022727272727272727, "loss": 4.2109, "step": 15},
    {"epoch": 0.01, "learning_rate": 0.00024242424242424245, "loss": 4.0703, "step": 16},
    {"epoch": 0.01, "learning_rate": 0.00025757575757575756, "loss": 4.1719, "step": 17},
    {"epoch": 0.01, "learning_rate": 0.00027272727272727274, "loss": 4.0469, "step": 18},
    {"epoch": 0.01, "learning_rate": 0.0002878787878787879, "loss": 4.0547, "step": 19},
    {"epoch": 0.01, "learning_rate": 0.00030303030303030303, "loss": 3.8555, "step": 20},
    {"epoch": 0.01, "learning_rate": 0.0003181818181818182, "loss": 3.9453, "step": 21},
    {"epoch": 0.01, "learning_rate": 0.0003333333333333333, "loss": 3.8047, "step": 22},
    {"epoch": 0.01, "learning_rate": 0.0003484848484848485, "loss": 3.6797, "step": 23},
    {"epoch": 0.01, "learning_rate": 0.00036363636363636367, "loss": 3.8516, "step": 24},
    {"epoch": 0.01, "learning_rate": 0.0003787878787878788, "loss": 3.7422, "step": 25},
    {"epoch": 0.01, "learning_rate": 0.0003939393939393939, "loss": 3.5898, "step": 26},
    {"epoch": 0.01, "learning_rate": 0.00040909090909090913, "loss": 3.6992, "step": 27},
    {"epoch": 0.01, "learning_rate": 0.00042424242424242425, "loss": 3.6602, "step": 28},
    {"epoch": 0.01, "learning_rate": 0.0004393939393939394, "loss": 3.5078, "step": 29},
    {"epoch": 0.01, "learning_rate": 0.00045454545454545455, "loss": 3.6094, "step": 30},
    {"epoch": 0.01, "learning_rate": 0.0004696969696969697, "loss": 3.4961, "step": 31},
    {"epoch": 0.01, "learning_rate": 0.0004848484848484849, "loss": 3.4609, "step": 32},
    {"epoch": 0.02, "learning_rate": 0.0005, "loss": 3.4102, "step": 33},
    {"epoch": 0.02, "learning_rate": 0.0005151515151515151, "loss": 3.4688, "step": 34},
    {"epoch": 0.02, "learning_rate": 0.0005303030303030302, "loss": 3.3672, "step": 35},
    {"epoch": 0.02, "learning_rate": 0.0005454545454545455, "loss": 3.3555, "step": 36},
    {"epoch": 0.02, "learning_rate": 0.0005606060606060606, "loss": 3.4102, "step": 37},
    {"epoch": 0.02, "learning_rate": 0.0005757575757575758, "loss": 3.4375, "step": 38},
    {"epoch": 0.02, "learning_rate": 0.0005909090909090909, "loss": 3.4844, "step": 39},
    {"epoch": 0.02, "learning_rate": 0.0006060606060606061, "loss": 3.4023, "step": 40},
    {"epoch": 0.02, "learning_rate": 0.0006212121212121212, "loss": 3.2578, "step": 41},
    {"epoch": 0.02, "learning_rate": 0.0006363636363636364, "loss": 3.3516, "step": 42},
    {"epoch": 0.02, "learning_rate": 0.0006515151515151515, "loss": 3.293, "step": 43},
    {"epoch": 0.02, "learning_rate": 0.0006666666666666666, "loss": 3.207, "step": 44},
    {"epoch": 0.02, "learning_rate": 0.0006818181818181818, "loss": 3.125, "step": 45},
    {"epoch": 0.02, "learning_rate": 0.000696969696969697, "loss": 3.2578, "step": 46},
    {"epoch": 0.02, "learning_rate": 0.0007121212121212122, "loss": 3.2578, "step": 47},
    {"epoch": 0.02, "learning_rate": 0.0007272727272727273, "loss": 3.2148, "step": 48},
    {"epoch": 0.02, "learning_rate": 0.0007424242424242425, "loss": 3.1914, "step": 49},
    {"epoch": 0.02, "learning_rate": 0.0007575757575757576, "loss": 3.2383, "step": 50},
    {"epoch": 0.02, "learning_rate": 0.0007727272727272727, "loss": 3.1328, "step": 51},
    {"epoch": 0.02, "learning_rate": 0.0007878787878787878, "loss": 3.1562, "step": 52},
    {"epoch": 0.02, "learning_rate": 0.000803030303030303, "loss": 3.0859, "step": 53},
    {"epoch": 0.02, "learning_rate": 0.0008181818181818183, "loss": 3.1016, "step": 54},
    {"epoch": 0.03, "learning_rate": 0.0008333333333333334, "loss": 3.1367, "step": 55},
    {"epoch": 0.03, "learning_rate": 0.0008484848484848485, "loss": 3.1641, "step": 56},
    {"epoch": 0.03, "learning_rate": 0.0008636363636363636, "loss": 3.0352, "step": 57},
    {"epoch": 0.03, "learning_rate": 0.0008787878787878789, "loss": 3.0703, "step": 58},
    {"epoch": 0.03, "learning_rate": 0.000893939393939394, "loss": 3.0195, "step": 59},
    {"epoch": 0.03, "learning_rate": 0.0009090909090909091, "loss": 3.0391, "step": 60},
    {"epoch": 0.03, "learning_rate": 0.0009242424242424242, "loss": 2.9805, "step": 61},
    {"epoch": 0.03, "learning_rate": 0.0009393939393939394, "loss": 2.9609, "step": 62},
    {"epoch": 0.03, "learning_rate": 0.0009545454545454546, "loss": 3.0117, "step": 63},
    {"epoch": 0.03, "learning_rate": 0.0009696969696969698, "loss": 2.8672, "step": 64},
    {"epoch": 0.03, "learning_rate": 0.000984848484848485, "loss": 2.9805, "step": 65},
    {"epoch": 0.03, "learning_rate": 0.001, "loss": 2.8398, "step": 66},
    {"epoch": 0.03, "learning_rate": 0.0009999994484067654, "loss": 2.8789, "step": 67},
    {"epoch": 0.03, "learning_rate": 0.0009999977936282788, "loss": 2.8633, "step": 68},
    {"epoch": 0.03, "learning_rate": 0.0009999950356681913, "loss": 2.793, "step": 69},
    {"epoch": 0.03, "learning_rate": 0.0009999911745325876, "loss": 2.8594, "step": 70},
    {"epoch": 0.03, "learning_rate": 0.0009999862102299873, "loss": 2.7539, "step": 71},
    {"epoch": 0.03, "learning_rate": 0.0009999801427713433, "loss": 2.8125, "step": 72},
    {"epoch": 0.03, "learning_rate": 0.0009999729721700424, "loss": 2.6992, "step": 73},
    {"epoch": 0.03, "learning_rate": 0.000999964698441906, "loss": 2.8281, "step": 74},
    {"epoch": 0.03, "learning_rate": 0.0009999553216051892, "loss": 2.7461, "step": 75},
    {"epoch": 0.03, "learning_rate": 0.00099994484168058, "loss": 2.7461, "step": 76},
    {"epoch": 0.04, "learning_rate": 0.0009999332586912019, "loss": 2.8242, "step": 77},
    {"epoch": 0.04, "learning_rate": 0.0009999205726626108, "loss": 2.6172, "step": 78},
    {"epoch": 0.04, "learning_rate": 0.000999906783622797, "loss": 2.6445, "step": 79},
    {"epoch": 0.04, "learning_rate": 0.0009998918916021842, "loss": 2.7812, "step": 80},
    {"epoch": 0.04, "learning_rate": 0.0009998758966336297, "loss": 2.7734, "step": 81},
    {"epoch": 0.04, "learning_rate": 0.0009998587987524242, "loss": 2.7344, "step": 82},
    {"epoch": 0.04, "learning_rate": 0.0009998405979962926, "loss": 2.6562, "step": 83},
    {"epoch": 0.04, "learning_rate": 0.000999821294405392, "loss": 2.5859, "step": 84},
    {"epoch": 0.04, "learning_rate": 0.0009998008880223134, "loss": 2.7344, "step": 85},
    {"epoch": 0.04, "learning_rate": 0.000999779378892081, "loss": 2.5547, "step": 86},
    {"epoch": 0.04, "learning_rate": 0.0009997567670621522, "loss": 2.6367, "step": 87},
    {"epoch": 0.04, "learning_rate": 0.0009997330525824165, "loss": 2.582, "step": 88},
    {"epoch": 0.04, "learning_rate": 0.0009997082355051976, "loss": 2.6055, "step": 89},
    {"epoch": 0.04, "learning_rate": 0.000999682315885251, "loss": 2.6133, "step": 90},
    {"epoch": 0.04, "learning_rate": 0.0009996552937797645, "loss": 2.5586, "step": 91},
    {"epoch": 0.04, "learning_rate": 0.0009996271692483596, "loss": 2.5938, "step": 92},
    {"epoch": 0.04, "learning_rate": 0.0009995979423530893, "loss": 2.543, "step": 93},
    {"epoch": 0.04, "learning_rate": 0.000999567613158439, "loss": 2.5156, "step": 94},
    {"epoch": 0.04, "learning_rate": 0.0009995361817313263, "loss": 2.5352, "step": 95},
    {"epoch": 0.04, "learning_rate": 0.0009995036481411004, "loss": 2.543, "step": 96},
    {"epoch": 0.04, "learning_rate": 0.0009994700124595429, "loss": 2.6836, "step": 97},
    {"epoch": 0.04, "learning_rate": 0.0009994352747608663, "loss": 2.4688, "step": 98},
    {"epoch": 0.05, "learning_rate": 0.0009993994351217151, "loss": 2.6055, "step": 99},
    {"epoch": 0.05, "learning_rate": 0.000999362493621165, "loss": 2.5234, "step": 100},
    {"epoch": 0.05, "learning_rate": 0.0009993244503407226, "loss": 2.5938, "step": 101},
    {"epoch": 0.05, "learning_rate": 0.0009992853053643258, "loss": 2.4727, "step": 102},
    {"epoch": 0.05, "learning_rate": 0.0009992450587783426, "loss": 2.5039, "step": 103},
    {"epoch": 0.05, "learning_rate": 0.000999203710671572, "loss": 2.4102, "step": 104},
    {"epoch": 0.05, "learning_rate": 0.0009991612611352438, "loss": 2.4414, "step": 105},
    {"epoch": 0.05, "learning_rate": 0.0009991177102630173, "loss": 2.4531, "step": 106},
    {"epoch": 0.05, "learning_rate": 0.0009990730581509817, "loss": 2.4219, "step": 107},
    {"epoch": 0.05, "learning_rate": 0.0009990273048976566, "loss": 2.4375, "step": 108},
    {"epoch": 0.05, "learning_rate": 0.0009989804506039905, "loss": 2.4727, "step": 109},
    {"epoch": 0.05, "learning_rate": 0.0009989324953733614, "loss": 2.4219, "step": 110},
    {"epoch": 0.05, "learning_rate": 0.0009988834393115766, "loss": 2.3672, "step": 111},
    {"epoch": 0.05, "learning_rate": 0.000998833282526872, "loss": 2.4961, "step": 112},
    {"epoch": 0.05, "learning_rate": 0.0009987820251299122, "loss": 2.4141, "step": 113},
    {"epoch": 0.05, "learning_rate": 0.00099872966723379, "loss": 2.5664, "step": 114},
    {"epoch": 0.05, "learning_rate": 0.0009986762089540266, "loss": 2.3672, "step": 115},
    {"epoch": 0.05, "learning_rate": 0.0009986216504085709, "loss": 2.3477, "step": 116},
    {"epoch": 0.05, "learning_rate": 0.0009985659917177991, "loss": 2.4062, "step": 117},
    {"epoch": 0.05, "learning_rate": 0.0009985092330045155, "loss": 2.4453, "step": 118},
    {"epoch": 0.05, "learning_rate": 0.0009984513743939508, "loss": 2.4297, "step": 119},
    {"epoch": 0.06, "learning_rate": 0.0009983924160137626, "loss": 2.4492, "step": 120},
    {"epoch": 0.06, "learning_rate": 0.000998332357994035, "loss": 2.375, "step": 121},
    {"epoch": 0.06, "learning_rate": 0.0009982712004672786, "loss": 2.4688, "step": 122},
    {"epoch": 0.06, "learning_rate": 0.0009982089435684295, "loss": 2.4922, "step": 123},
    {"epoch": 0.06, "learning_rate": 0.0009981455874348499, "loss": 2.4297, "step": 124},
    {"epoch": 0.06, "learning_rate": 0.0009980811322063269, "loss": 2.3203, "step": 125},
    {"epoch": 0.06, "learning_rate": 0.0009980155780250728, "loss": 2.3711, "step": 126},
    {"epoch": 0.06, "learning_rate": 0.0009979489250357243, "loss": 2.3516, "step": 127},
    {"epoch": 0.06, "learning_rate": 0.0009978811733853431, "loss": 2.4258, "step": 128},
    {"epoch": 0.06, "learning_rate": 0.0009978123232234147, "loss": 2.375, "step": 129},
    {"epoch": 0.06, "learning_rate": 0.000997742374701848, "loss": 2.3555, "step": 130},
    {"epoch": 0.06, "learning_rate": 0.0009976713279749754, "loss": 2.375, "step": 131},
    {"epoch": 0.06, "learning_rate": 0.0009975991831995528, "loss": 2.3867, "step": 132},
    {"epoch": 0.06, "learning_rate": 0.0009975259405347581, "loss": 2.3906, "step": 133},
    {"epoch": 0.06, "learning_rate": 0.0009974516001421926, "loss": 2.3789, "step": 134},
    {"epoch": 0.06, "learning_rate": 0.000997376162185878, "loss": 2.375, "step": 135},
    {"epoch": 0.06, "learning_rate": 0.0009972996268322594, "loss": 2.3867, "step": 136},
    {"epoch": 0.06, "learning_rate": 0.0009972219942502017, "loss": 2.3633, "step": 137},
    {"epoch": 0.06, "learning_rate": 0.0009971432646109918, "loss": 2.3477, "step": 138},
    {"epoch": 0.06, "learning_rate": 0.0009970634380883365, "loss": 2.3125, "step": 139},
    {"epoch": 0.06, "learning_rate": 0.0009969825148583627, "loss": 2.3281, "step": 140},
    {"epoch": 0.06, "learning_rate": 0.0009969004950996173, "loss": 2.375, "step": 141},
    {"epoch": 0.07, "learning_rate": 0.0009968173789930668, "loss": 2.3867, "step": 142},
    {"epoch": 0.07, "learning_rate": 0.0009967331667220958, "loss": 2.3906, "step": 143},
    {"epoch": 0.07, "learning_rate": 0.0009966478584725086, "loss": 2.4258, "step": 144},
    {"epoch": 0.07, "learning_rate": 0.0009965614544325263, "loss": 2.375, "step": 145},
    {"epoch": 0.07, "learning_rate": 0.000996473954792789, "loss": 2.3359, "step": 146},
    {"epoch": 0.07, "learning_rate": 0.0009963853597463532, "loss": 2.3203, "step": 147},
    {"epoch": 0.07, "learning_rate": 0.000996295669488693, "loss": 2.4102, "step": 148},
    {"epoch": 0.07, "learning_rate": 0.0009962048842176979, "loss": 2.3359, "step": 149},
    {"epoch": 0.07, "learning_rate": 0.0009961130041336748, "loss": 2.332, "step": 150},
    {"epoch": 0.07, "learning_rate": 0.0009960200294393449, "loss": 2.2812, "step": 151},
    {"epoch": 0.07, "learning_rate": 0.0009959259603398453, "loss": 2.3242, "step": 152},
    {"epoch": 0.07, "learning_rate": 0.0009958307970427275, "loss": 2.3984, "step": 153},
    {"epoch": 0.07, "learning_rate": 0.0009957345397579572, "loss": 2.3242, "step": 154},
    {"epoch": 0.07, "learning_rate": 0.0009956371886979138, "loss": 2.3438, "step": 155},
    {"epoch": 0.07, "learning_rate": 0.00099553874407739, "loss": 2.3828, "step": 156},
    {"epoch": 0.07, "learning_rate": 0.0009954392061135916, "loss": 2.3242, "step": 157},
    {"epoch": 0.07, "learning_rate": 0.0009953385750261364, "loss": 2.2969, "step": 158},
    {"epoch": 0.07, "learning_rate": 0.0009952368510370538, "loss": 2.2773, "step": 159},
    {"epoch": 0.07, "learning_rate": 0.0009951340343707852, "loss": 2.3086, "step": 160},
    {"epoch": 0.07, "learning_rate": 0.0009950301252541823, "loss": 2.2305, "step": 161},
    {"epoch": 0.07, "learning_rate": 0.0009949251239165075, "loss": 2.2695, "step": 162},
    {"epoch": 0.07, "learning_rate": 0.000994819030589433, "loss": 2.2266, "step": 163},
    {"epoch": 0.08, "learning_rate": 0.00099471184550704, "loss": 2.2266, "step": 164},
    {"epoch": 0.08, "learning_rate": 0.0009946035689058189, "loss": 2.2695, "step": 165},
    {"epoch": 0.08, "learning_rate": 0.0009944942010246681, "loss": 2.3477, "step": 166},
    {"epoch": 0.08, "learning_rate": 0.0009943837421048942, "loss": 2.332, "step": 167},
    {"epoch": 0.08, "learning_rate": 0.0009942721923902106, "loss": 2.3164, "step": 168},
    {"epoch": 0.08, "learning_rate": 0.0009941595521267377, "loss": 2.3516, "step": 169},
    {"epoch": 0.08, "learning_rate": 0.0009940458215630017, "loss": 2.2578, "step": 170},
    {"epoch": 0.08, "learning_rate": 0.0009939310009499348, "loss": 2.2656, "step": 171},
    {"epoch": 0.08, "learning_rate": 0.000993815090540874, "loss": 2.2891, "step": 172},
    {"epoch": 0.08, "learning_rate": 0.000993698090591561, "loss": 2.3086, "step": 173},
    {"epoch": 0.08, "learning_rate": 0.0009935800013601416, "loss": 2.2188, "step": 174},
    {"epoch": 0.08, "learning_rate": 0.000993460823107164, "loss": 2.3789, "step": 175},
    {"epoch": 0.08, "learning_rate": 0.0009933405560955803, "loss": 2.3594, "step": 176},
    {"epoch": 0.08, "learning_rate": 0.0009932192005907446, "loss": 2.2344, "step": 177},
    {"epoch": 0.08, "learning_rate": 0.0009930967568604118, "loss": 2.3438, "step": 178},
    {"epoch": 0.08, "learning_rate": 0.000992973225174739, "loss": 2.3008, "step": 179},
    {"epoch": 0.08, "learning_rate": 0.0009928486058062827, "loss": 2.2852, "step": 180},
    {"epoch": 0.08, "learning_rate": 0.0009927228990299999, "loss": 2.2656, "step": 181},
    {"epoch": 0.08, "learning_rate": 0.0009925961051232468, "loss": 2.3008, "step": 182},
    {"epoch": 0.08, "learning_rate": 0.000992468224365778, "loss": 2.3633, "step": 183},
    {"epoch": 0.08, "learning_rate": 0.000992339257039746, "loss": 2.2695, "step": 184},
    {"epoch": 0.08, "learning_rate": 0.0009922092034297006, "loss": 2.2852, "step": 185},
    {"epoch": 0.09, "learning_rate": 0.0009920780638225891, "loss": 2.2305, "step": 186},
    {"epoch": 0.09, "learning_rate": 0.0009919458385077538, "loss": 2.3203, "step": 187},
    {"epoch": 0.09, "learning_rate": 0.0009918125277769336, "loss": 2.2734, "step": 188},
    {"epoch": 0.09, "learning_rate": 0.0009916781319242614, "loss": 2.3906, "step": 189},
    {"epoch": 0.09, "learning_rate": 0.0009915426512462646, "loss": 2.2227, "step": 190},
    {"epoch": 0.09, "learning_rate": 0.0009914060860418644, "loss": 2.1602, "step": 191},
    {"epoch": 0.09, "learning_rate": 0.000991268436612374, "loss": 2.2344, "step": 192},
    {"epoch": 0.09, "learning_rate": 0.0009911297032614997, "loss": 2.25, "step": 193},
    {"epoch": 0.09, "learning_rate": 0.000990989886295339, "loss": 2.3164, "step": 194},
    {"epoch": 0.09, "learning_rate": 0.0009908489860223804, "loss": 2.2539, "step": 195},
    {"epoch": 0.09, "learning_rate": 0.000990707002753502, "loss": 2.2656, "step": 196},
    {"epoch": 0.09, "learning_rate": 0.0009905639368019724, "loss": 2.2578, "step": 197},
    {"epoch": 0.09, "learning_rate": 0.0009904197884834482, "loss": 2.2539, "step": 198},
    {"epoch": 0.09, "learning_rate": 0.0009902745581159742, "loss": 2.1914, "step": 199},
    {"epoch": 0.09, "learning_rate": 0.0009901282460199829, "loss": 2.2812, "step": 200},
    {"epoch": 0.09, "learning_rate": 0.0009899808525182935, "loss": 2.3203, "step": 201},
    {"epoch": 0.09, "learning_rate": 0.0009898323779361107, "loss": 2.1914, "step": 202},
    {"epoch": 0.09, "learning_rate": 0.000989682822601025, "loss": 2.1797, "step": 203},
    {"epoch": 0.09, "learning_rate": 0.0009895321868430113, "loss": 2.168, "step": 204},
    {"epoch": 0.09, "learning_rate": 0.0009893804709944281, "loss": 2.2734, "step": 205},
    {"epoch": 0.09, "learning_rate": 0.0009892276753900174, "loss": 2.3203, "step": 206},
    {"epoch": 0.09, "learning_rate": 0.0009890738003669028, "loss": 2.2227, "step": 207},
    {"epoch": 0.1, "learning_rate": 0.0009889188462645904, "loss": 2.2773, "step": 208},
    {"epoch": 0.1, "learning_rate": 0.0009887628134249667, "loss": 2.2148, "step": 209},
    {"epoch": 0.1, "learning_rate": 0.0009886057021922983, "loss": 2.2773, "step": 210},
    {"epoch": 0.1, "learning_rate": 0.0009884475129132311, "loss": 2.3086, "step": 211},
    {"epoch": 0.1, "learning_rate": 0.0009882882459367897, "loss": 2.2617, "step": 212},
    {"epoch": 0.1, "learning_rate": 0.0009881279016143766, "loss": 2.2695, "step": 213},
    {"epoch": 0.1, "learning_rate": 0.0009879664802997707, "loss": 2.2852, "step": 214},
    {"epoch": 0.1, "learning_rate": 0.000987803982349128, "loss": 2.207, "step": 215},
    {"epoch": 0.1, "learning_rate": 0.0009876404081209796, "loss": 2.2695, "step": 216},
    {"epoch": 0.1, "learning_rate": 0.000987475757976231, "loss": 2.2461, "step": 217},
    {"epoch": 0.1, "learning_rate": 0.000987310032278162, "loss": 2.3398, "step": 218},
    {"epoch": 0.1, "learning_rate": 0.0009871432313924254, "loss": 2.293, "step": 219},
    {"epoch": 0.1, "learning_rate": 0.000986975355687046, "loss": 2.2773, "step": 220},
    {"epoch": 0.1, "learning_rate": 0.0009868064055324204, "loss": 2.2188, "step": 221},
    {"epoch": 0.1, "learning_rate": 0.0009866363813013153, "loss": 2.2383, "step": 222},
    {"epoch": 0.1, "learning_rate": 0.0009864652833688676, "loss": 2.2812, "step": 223},
    {"epoch": 0.1, "learning_rate": 0.0009862931121125836, "loss": 2.2969, "step": 224},
    {"epoch": 0.1, "learning_rate": 0.000986119867912337, "loss": 2.2344, "step": 225},
    {"epoch": 0.1, "learning_rate": 0.000985945551150369, "loss": 2.1797, "step": 226},
    {"epoch": 0.1, "learning_rate": 0.0009857701622112876, "loss": 2.3008, "step": 227},
    {"epoch": 0.1, "learning_rate": 0.000985593701482066, "loss": 2.2461, "step": 228},
    {"epoch": 0.1, "learning_rate": 0.0009854161693520424, "loss": 2.3125, "step": 229},
    {"epoch": 0.11, "learning_rate": 0.0009852375662129194, "loss": 2.1914, "step": 230},
    {"epoch": 0.11, "learning_rate": 0.0009850578924587613, "loss": 2.3477, "step": 231},
    {"epoch": 0.11, "learning_rate": 0.000984877148485996, "loss": 2.2969, "step": 232},
    {"epoch": 0.11, "learning_rate": 0.000984695334693412, "loss": 2.1914, "step": 233},
    {"epoch": 0.11, "learning_rate": 0.000984512451482158, "loss": 2.2344, "step": 234},
    {"epoch": 0.11, "learning_rate": 0.0009843284992557431, "loss": 2.2266, "step": 235},
    {"epoch": 0.11, "learning_rate": 0.000984143478420034, "loss": 2.2539, "step": 236},
    {"epoch": 0.11, "learning_rate": 0.0009839573893832563, "loss": 2.2734, "step": 237},
    {"epoch": 0.11, "learning_rate": 0.000983770232555991, "loss": 2.2539, "step": 238},
    {"epoch": 0.11, "learning_rate": 0.0009835820083511765, "loss": 2.2383, "step": 239},
    {"epoch": 0.11, "learning_rate": 0.0009833927171841055, "loss": 2.2344, "step": 240},
    {"epoch": 0.11, "learning_rate": 0.0009832023594724246, "loss": 2.1484, "step": 241},
    {"epoch": 0.11, "learning_rate": 0.0009830109356361344, "loss": 2.2656, "step": 242},
    {"epoch": 0.11, "learning_rate": 0.0009828184460975867, "loss": 2.2734, "step": 243},
    {"epoch": 0.11, "learning_rate": 0.0009826248912814855, "loss": 2.2656, "step": 244},
    {"epoch": 0.11, "learning_rate": 0.0009824302716148847, "loss": 2.3242, "step": 245},
    {"epoch": 0.11, "learning_rate": 0.0009822345875271884, "loss": 2.2266, "step": 246},
    {"epoch": 0.11, "learning_rate": 0.0009820378394501481, "loss": 2.3008, "step": 247},
    {"epoch": 0.11, "learning_rate": 0.0009818400278178636, "loss": 2.2617, "step": 248},
    {"epoch": 0.11, "learning_rate": 0.0009816411530667814, "loss": 2.2383, "step": 249},
    {"epoch": 0.11, "learning_rate": 0.000981441215635693, "loss": 2.1797, "step": 250},
    {"epoch": 0.12, "learning_rate": 0.0009812402159657353, "loss": 2.2383, "step": 251},
    {"epoch": 0.12, "learning_rate": 0.000981038154500388, "loss": 2.1836, "step": 252},
    {"epoch": 0.12, "learning_rate": 0.0009808350316854746, "loss": 2.1875, "step": 253},
    {"epoch": 0.12, "learning_rate": 0.0009806308479691594, "loss": 2.1758, "step": 254},
    {"epoch": 0.12, "learning_rate": 0.0009804256038019482, "loss": 2.2656, "step": 255},
    {"epoch": 0.12, "learning_rate": 0.0009802192996366857, "loss": 2.2148, "step": 256},
    {"epoch": 0.12, "learning_rate": 0.0009800119359285563, "loss": 2.2539, "step": 257},
    {"epoch": 0.12, "learning_rate": 0.0009798035131350813, "loss": 2.1562, "step": 258},
    {"epoch": 0.12, "learning_rate": 0.0009795940317161194, "loss": 2.2383, "step": 259},
    {"epoch": 0.12, "learning_rate": 0.0009793834921338646, "loss": 2.1875, "step": 260},
    {"epoch": 0.12, "learning_rate": 0.0009791718948528457, "loss": 2.3125, "step": 261},
    {"epoch": 0.12, "learning_rate": 0.0009789592403399252, "loss": 2.1992, "step": 262},
    {"epoch": 0.12, "learning_rate": 0.0009787455290642985, "loss": 2.2461, "step": 263},
    {"epoch": 0.12, "learning_rate": 0.000978530761497492, "loss": 2.3594, "step": 264},
    {"epoch": 0.12, "learning_rate": 0.0009783149381133633, "loss": 2.2656, "step": 265},
    {"epoch": 0.12, "learning_rate": 0.0009780980593880992, "loss": 2.2422, "step": 266},
    {"epoch": 0.12, "learning_rate": 0.0009778801258002153, "loss": 2.1914, "step": 267},
    {"epoch": 0.12, "learning_rate": 0.000977661137830554, "loss": 2.2422, "step": 268},
    {"epoch": 0.12, "learning_rate": 0.0009774410959622845, "loss": 2.2188, "step": 269},
    {"epoch": 0.12, "learning_rate": 0.000977220000680901, "loss": 2.1914, "step": 270},
    {"epoch": 0.12, "learning_rate": 0.000976997852474223, "loss": 2.2031, "step": 271},
    {"epoch": 0.12, "learning_rate": 0.0009767746518323914, "loss": 2.2344, "step": 272},
    {"epoch": 0.13, "learning_rate": 0.0009765503992478704, "loss": 2.1797, "step": 273},
    {"epoch": 0.13, "learning_rate": 0.0009763250952154449, "loss": 2.2734, "step": 274},
    {"epoch": 0.13, "learning_rate": 0.0009760987402322195, "loss": 2.1992, "step": 275},
    {"epoch": 0.13, "learning_rate": 0.0009758713347976178, "loss": 2.2148, "step": 276},
    {"epoch": 0.13, "learning_rate": 0.000975642879413381, "loss": 2.2461, "step": 277},
    {"epoch": 0.13, "learning_rate": 0.0009754133745835665, "loss": 2.2773, "step": 278},
    {"epoch": 0.13, "learning_rate": 0.0009751828208145482, "loss": 2.2539, "step": 279},
    {"epoch": 0.13, "learning_rate": 0.0009749512186150131, "loss": 2.25, "step": 280},
    {"epoch": 0.13, "learning_rate": 0.0009747185684959625, "loss": 2.2148, "step": 281},
    {"epoch": 0.13, "learning_rate": 0.000974484870970709, "loss": 2.1641, "step": 282},
    {"epoch": 0.13, "learning_rate": 0.0009742501265548767, "loss": 2.1797, "step": 283},
    {"epoch": 0.13, "learning_rate": 0.0009740143357663993, "loss": 2.2109, "step": 284},
    {"epoch": 0.13, "learning_rate": 0.000973777499125519, "loss": 2.2031, "step": 285},
    {"epoch": 0.13, "learning_rate": 0.0009735396171547859, "loss": 2.2617, "step": 286},
    {"epoch": 0.13, "learning_rate": 0.0009733006903790564, "loss": 2.2578, "step": 287},
    {"epoch": 0.13, "learning_rate": 0.0009730607193254922, "loss": 2.2734, "step": 288},
    {"epoch": 0.13, "learning_rate": 0.0009728197045235585, "loss": 2.1523, "step": 289},
    {"epoch": 0.13, "learning_rate": 0.0009725776465050242, "loss": 2.2383, "step": 290},
    {"epoch": 0.13, "learning_rate": 0.0009723345458039594, "loss": 2.207, "step": 291},
    {"epoch": 0.13, "learning_rate": 0.000972090402956735, "loss": 2.2031, "step": 292},
    {"epoch": 0.13, "learning_rate": 0.0009718452185020212, "loss": 2.1445, "step": 293},
    {"epoch": 0.13, "learning_rate": 0.0009715989929807862, "loss": 2.2227, "step": 294},
    {"epoch": 0.14, "learning_rate": 0.0009713517269362955, "loss": 2.207, "step": 295},
    {"epoch": 0.14, "learning_rate": 0.0009711034209141101, "loss": 2.2031, "step": 296},
    {"epoch": 0.14, "learning_rate": 0.0009708540754620856, "loss": 2.2578, "step": 297},
    {"epoch": 0.14, "learning_rate": 0.0009706036911303713, "loss": 2.2422, "step": 298},
    {"epoch": 0.14, "learning_rate": 0.0009703522684714083, "loss": 2.2188, "step": 299},
    {"epoch": 0.14, "learning_rate": 0.0009700998080399286, "loss": 2.1641, "step": 300},
    {"epoch": 0.14, "learning_rate": 0.0009698463103929542, "loss": 2.168, "step": 301},
    {"epoch": 0.14, "learning_rate": 0.0009695917760897954, "loss": 2.2109, "step": 302},
    {"epoch": 0.14, "learning_rate": 0.0009693362056920501, "loss": 2.1758, "step": 303},
    {"epoch": 0.14, "learning_rate": 0.0009690795997636015, "loss": 2.1992, "step": 304},
    {"epoch": 0.14, "learning_rate": 0.0009688219588706179, "loss": 2.2461, "step": 305},
    {"epoch": 0.14, "learning_rate": 0.0009685632835815518, "loss": 2.1484, "step": 306},
    {"epoch": 0.14, "learning_rate": 0.0009683035744671367, "loss": 2.2227, "step": 307},
    {"epoch": 0.14, "learning_rate": 0.0009680428321003883, "loss": 2.2266, "step": 308},
    {"epoch": 0.14, "learning_rate": 0.000967781057056601, "loss": 2.2188, "step": 309},
    {"epoch": 0.14, "learning_rate": 0.0009675182499133485, "loss": 2.2227, "step": 310},
    {"epoch": 0.14, "learning_rate": 0.0009672544112504813, "loss": 2.168, "step": 311},
    {"epoch": 0.14, "learning_rate": 0.0009669895416501257, "loss": 2.1758, "step": 312},
    {"epoch": 0.14, "learning_rate": 0.0009667236416966833, "loss": 2.1992, "step": 313},
    {"epoch": 0.14, "learning_rate": 0.0009664567119768281, "loss": 2.2031, "step": 314},
    {"epoch": 0.14, "learning_rate": 0.0009661887530795067, "loss": 2.1758, "step": 315},
    {"epoch": 0.14, "learning_rate": 0.0009659197655959365, "loss": 2.1406, "step": 316},
    {"epoch": 0.15, "learning_rate": 0.000965649750119604, "loss": 2.2012, "step": 317},
    {"epoch": 0.15, "learning_rate": 0.0009653787072462643, "loss": 2.2305, "step": 318},
    {"epoch": 0.15, "learning_rate": 0.0009651066375739388, "loss": 2.2812, "step": 319},
    {"epoch": 0.15, "learning_rate": 0.000964833541702915, "loss": 2.1875, "step": 320},
    {"epoch": 0.15, "learning_rate": 0.0009645594202357438, "loss": 2.1055, "step": 321},
    {"epoch": 0.15, "learning_rate": 0.0009642842737772397, "loss": 2.1484, "step": 322},
    {"epoch": 0.15, "learning_rate": 0.0009640081029344782, "loss": 2.2109, "step": 323},
    {"epoch": 0.15, "learning_rate": 0.0009637309083167956, "loss": 2.168, "step": 324},
    {"epoch": 0.15, "learning_rate": 0.0009634526905357859, "loss": 2.2656, "step": 325},
    {"epoch": 0.15, "learning_rate": 0.000963173450205302, "loss": 2.2266, "step": 326},
    {"epoch": 0.15, "learning_rate": 0.0009628931879414517, "loss": 2.2148, "step": 327},
    {"epoch": 0.15, "learning_rate": 0.0009626119043625983, "loss": 2.2617, "step": 328},
    {"epoch": 0.15, "learning_rate": 0.0009623296000893582, "loss": 2.168, "step": 329},
    {"epoch": 0.15, "learning_rate": 0.0009620462757446, "loss": 2.2461, "step": 330},
    {"epoch": 0.15, "learning_rate": 0.0009617619319534428, "loss": 2.2773, "step": 331},
    {"epoch": 0.15, "learning_rate": 0.000961476569343255, "loss": 2.2617, "step": 332},
    {"epoch": 0.15, "learning_rate": 0.0009611901885436529, "loss": 2.2031, "step": 333},
    {"epoch": 0.15, "learning_rate": 0.0009609027901864996, "loss": 2.1953, "step": 334},
    {"epoch": 0.15, "learning_rate": 0.0009606143749059029, "loss": 2.2461, "step": 335},
    {"epoch": 0.15, "learning_rate": 0.0009603249433382144, "loss": 2.1523, "step": 336},
    {"epoch": 0.15, "learning_rate": 0.0009600344961220282, "loss": 2.2188, "step": 337},
    {"epoch": 0.15, "learning_rate": 0.0009597430338981791, "loss": 2.1758, "step": 338},
    {"epoch": 0.16, "learning_rate": 0.0009594505573097414, "loss": 2.1523, "step": 339},
    {"epoch": 0.16, "learning_rate": 0.0009591570670020277, "loss": 2.2344, "step": 340},
    {"epoch": 0.16, "learning_rate": 0.0009588625636225871, "loss": 2.1875, "step": 341},
    {"epoch": 0.16, "learning_rate": 0.0009585670478212036, "loss": 2.1758, "step": 342},
    {"epoch": 0.16, "learning_rate": 0.0009582705202498956, "loss": 2.2266, "step": 343},
    {"epoch": 0.16, "learning_rate": 0.0009579729815629133, "loss": 2.1328, "step": 344},
    {"epoch": 0.16, "learning_rate": 0.0009576744324167379, "loss": 2.2852, "step": 345},
    {"epoch": 0.16, "learning_rate": 0.0009573748734700804, "loss": 2.207, "step": 346},
    {"epoch": 0.16, "learning_rate": 0.0009570743053838796, "loss": 2.2578, "step": 347},
    {"epoch": 0.16, "learning_rate": 0.0009567727288213005, "loss": 2.1836, "step": 348},
    {"epoch": 0.16, "learning_rate": 0.0009564701444477337, "loss": 2.2344, "step": 349},
    {"epoch": 0.16, "learning_rate": 0.000956166552930793, "loss": 2.1719, "step": 350},
    {"epoch": 0.16, "learning_rate": 0.0009558619549403147, "loss": 2.1836, "step": 351},
    {"epoch": 0.16, "learning_rate": 0.0009555563511483555, "loss": 2.2266, "step": 352},
    {"epoch": 0.16, "learning_rate": 0.0009552497422291912, "loss": 2.1992, "step": 353},
    {"epoch": 0.16, "learning_rate": 0.0009549421288593157, "loss": 2.2461, "step": 354},
    {"epoch": 0.16, "learning_rate": 0.0009546335117174385, "loss": 2.207, "step": 355},
    {"epoch": 0.16, "learning_rate": 0.0009543238914844843, "loss": 2.1992, "step": 356},
    {"epoch": 0.16, "learning_rate": 0.0009540132688435907, "loss": 2.0898, "step": 357},
    {"epoch": 0.16, "learning_rate": 0.0009537016444801074, "loss": 2.2188, "step": 358},
    {"epoch": 0.16, "learning_rate": 0.0009533890190815935, "loss": 2.1914, "step": 359},
    {"epoch": 0.17, "learning_rate": 0.0009530753933378173, "loss": 2.2227, "step": 360},
    {"epoch": 0.17, "learning_rate": 0.0009527607679407545, "loss": 2.1562, "step": 361},
    {"epoch": 0.17, "learning_rate": 0.0009524451435845857, "loss": 2.1602, "step": 362},
    {"epoch": 0.17, "learning_rate": 0.0009521285209656963, "loss": 2.1875, "step": 363},
    {"epoch": 0.17, "learning_rate": 0.0009518109007826734, "loss": 2.1719, "step": 364},
    {"epoch": 0.17, "learning_rate": 0.0009514922837363059, "loss": 2.207, "step": 365},
    {"epoch": 0.17, "learning_rate": 0.0009511726705295817, "loss": 2.2148, "step": 366},
    {"epoch": 0.17, "learning_rate": 0.000950852061867687, "loss": 2.2344, "step": 367},
    {"epoch": 0.17, "learning_rate": 0.0009505304584580038, "loss": 2.1328, "step": 368},
    {"epoch": 0.17, "learning_rate": 0.0009502078610101092, "loss": 2.2422, "step": 369},
    {"epoch": 0.17, "learning_rate": 0.0009498842702357736, "loss": 2.1562, "step": 370},
    {"epoch": 0.17, "learning_rate": 0.0009495596868489587, "loss": 2.168, "step": 371},
    {"epoch": 0.17, "learning_rate": 0.0009492341115658167, "loss": 2.1484, "step": 372},
    {"epoch": 0.17, "learning_rate": 0.0009489075451046879, "loss": 2.1758, "step": 373},
    {"epoch": 0.17, "learning_rate": 0.0009485799881861, "loss": 2.207, "step": 374},
    {"epoch": 0.17, "learning_rate": 0.0009482514415327654, "loss": 2.3203, "step": 375},
    {"epoch": 0.17, "learning_rate": 0.000947921905869581, "loss": 2.1562, "step": 376},
    {"epoch": 0.17, "learning_rate": 0.0009475913819236248, "loss": 2.1602, "step": 377},
    {"epoch": 0.17, "learning_rate": 0.0009472598704241561, "loss": 2.2031, "step": 378},
    {"epoch": 0.17, "learning_rate": 0.0009469273721026131, "loss": 2.2422, "step": 379},
    {"epoch": 0.17, "learning_rate": 0.0009465938876926111, "loss": 2.1445, "step": 380},
    {"epoch": 0.17, "learning_rate": 0.0009462594179299406, "loss": 2.1172, "step": 381},
    {"epoch": 0.18, "learning_rate": 0.0009459239635525672, "loss": 2.1406, "step": 382},
    {"epoch": 0.18, "learning_rate": 0.0009455875253006281, "loss": 2.2617, "step": 383},
    {"epoch": 0.18, "learning_rate": 0.0009452501039164315, "loss": 2.2539, "step": 384},
    {"epoch": 0.18, "learning_rate": 0.0009449117001444549, "loss": 2.1914, "step": 385},
    {"epoch": 0.18, "learning_rate": 0.0009445723147313433, "loss": 2.1602, "step": 386},
    {"epoch": 0.18, "learning_rate": 0.0009442319484259074, "loss": 2.1094, "step": 387},
    {"epoch": 0.18, "learning_rate": 0.0009438906019791222, "loss": 2.1875, "step": 388},
    {"epoch": 0.18, "learning_rate": 0.0009435482761441251, "loss": 2.2305, "step": 389},
    {"epoch": 0.18, "learning_rate": 0.000943204971676215, "loss": 2.1641, "step": 390},
    {"epoch": 0.18, "learning_rate": 0.0009428606893328493, "loss": 2.1328, "step": 391},
    {"epoch": 0.18, "learning_rate": 0.0009425154298736432, "loss": 2.2461, "step": 392},
    {"epoch": 0.18, "learning_rate": 0.0009421691940603678, "loss": 2.2852, "step": 393},
    {"epoch": 0.18, "learning_rate": 0.0009418219826569488, "loss": 2.0957, "step": 394},
    {"epoch": 0.18, "learning_rate": 0.0009414737964294635, "loss": 2.2188, "step": 395},
    {"epoch": 0.18, "learning_rate": 0.000941124636146141, "loss": 2.1953, "step": 396},
    {"epoch": 0.18, "learning_rate": 0.0009407745025773589, "loss": 2.1328, "step": 397},
    {"epoch": 0.18, "learning_rate": 0.0009404233964956423, "loss": 2.2344, "step": 398},
    {"epoch": 0.18, "learning_rate": 0.0009400713186756625, "loss": 2.1562, "step": 399},
    {"epoch": 0.18, "learning_rate": 0.0009397182698942342, "loss": 2.1953, "step": 400},
    {"epoch": 0.18, "learning_rate": 0.0009393642509303149, "loss": 2.2891, "step": 401},
    {"epoch": 0.18, "learning_rate": 0.0009390092625650023, "loss": 2.1797, "step": 402},
    {"epoch": 0.18, "learning_rate": 0.0009386533055815332, "loss": 2.1406, "step": 403},
    {"epoch": 0.19, "learning_rate": 0.0009382963807652813, "loss": 2.1562, "step": 404},
    {"epoch": 0.19, "learning_rate": 0.000937938488903756, "loss": 2.2305, "step": 405},
    {"epoch": 0.19, "learning_rate": 0.0009375796307866003, "loss": 2.1523, "step": 406},
    {"epoch": 0.19, "learning_rate": 0.0009372198072055888, "loss": 2.1289, "step": 407},
    {"epoch": 0.19, "learning_rate": 0.0009368590189546268, "loss": 2.2656, "step": 408},
    {"epoch": 0.19, "learning_rate": 0.0009364972668297474, "loss": 2.2227, "step": 409},
    {"epoch": 0.19, "learning_rate": 0.0009361345516291111, "loss": 2.1602, "step": 410},
    {"epoch": 0.19, "learning_rate": 0.0009357708741530024, "loss": 2.1719, "step": 411},
    {"epoch": 0.19, "learning_rate": 0.00093540623520383, "loss": 2.1602, "step": 412},
    {"epoch": 0.19, "learning_rate": 0.000935040635586123, "loss": 2.2109, "step": 413},
    {"epoch": 0.19, "learning_rate": 0.0009346740761065305, "loss": 2.2109, "step": 414},
    {"epoch": 0.19, "learning_rate": 0.0009343065575738197, "loss": 2.1602, "step": 415},
    {"epoch": 0.19, "learning_rate": 0.0009339380807988733, "loss": 2.1953, "step": 416},
    {"epoch": 0.19, "learning_rate": 0.0009335686465946887, "loss": 2.293, "step": 417},
    {"epoch": 0.19, "learning_rate": 0.0009331982557763754, "loss": 2.1016, "step": 418},
    {"epoch": 0.19, "learning_rate": 0.0009328269091611537, "loss": 2.2227, "step": 419},
    {"epoch": 0.19, "learning_rate": 0.0009324546075683524, "loss": 2.1836, "step": 420},
    {"epoch": 0.19, "learning_rate": 0.0009320813518194083, "loss": 2.0703, "step": 421},
    {"epoch": 0.19, "learning_rate": 0.0009317071427378624, "loss": 2.125, "step": 422},
    {"epoch": 0.19, "learning_rate": 0.0009313319811493594, "loss": 2.2305, "step": 423},
    {"epoch": 0.19, "learning_rate": 0.000930955867881646, "loss": 2.1328, "step": 424},
    {"epoch": 0.19, "learning_rate": 0.0009305788037645681, "loss": 2.1914, "step": 425},
    {"epoch": 0.2, "learning_rate": 0.0009302007896300697, "loss": 2.1641, "step": 426},
    {"epoch": 0.2, "learning_rate": 0.0009298218263121911, "loss": 2.1641, "step": 427},
    {"epoch": 0.2, "learning_rate": 0.0009294419146470668, "loss": 2.1562, "step": 428},
    {"epoch": 0.2, "learning_rate": 0.0009290610554729234, "loss": 2.1914, "step": 429},
    {"epoch": 0.2, "learning_rate": 0.0009286792496300784, "loss": 2.2188, "step": 430},
    {"epoch": 0.2, "learning_rate": 0.0009282964979609379, "loss": 2.2266, "step": 431},
    {"epoch": 0.2, "learning_rate": 0.0009279128013099947, "loss": 2.168, "step": 432},
    {"epoch": 0.2, "learning_rate": 0.0009275281605238268, "loss": 2.1875, "step": 433},
    {"epoch": 0.2, "learning_rate": 0.0009271425764510953, "loss": 2.1523, "step": 434},
    {"epoch": 0.2, "learning_rate": 0.0009267560499425423, "loss": 2.1719, "step": 435},
    {"epoch": 0.2, "learning_rate": 0.0009263685818509895, "loss": 2.1445, "step": 436},
    {"epoch": 0.2, "learning_rate": 0.000925980173031336, "loss": 2.1562, "step": 437},
    {"epoch": 0.2, "learning_rate": 0.0009255908243405567, "loss": 2.1406, "step": 438},
    {"epoch": 0.2, "learning_rate": 0.0009252005366376996, "loss": 2.2305, "step": 439},
    {"epoch": 0.2, "learning_rate": 0.0009248093107838852, "loss": 2.1172, "step": 440},
    {"epoch": 0.2, "learning_rate": 0.0009244171476423036, "loss": 2.1992, "step": 441},
    {"epoch": 0.2, "learning_rate": 0.0009240240480782129, "loss": 2.1562, "step": 442},
    {"epoch": 0.2, "learning_rate": 0.0009236300129589376, "loss": 2.2109, "step": 443},
    {"epoch": 0.2, "learning_rate": 0.0009232350431538657, "loss": 2.1797, "step": 444},
    {"epoch": 0.2, "learning_rate": 0.0009228391395344482, "loss": 2.207, "step": 445},
    {"epoch": 0.2, "learning_rate": 0.000922442302974196, "loss": 2.2109, "step": 446},
    {"epoch": 0.2, "learning_rate": 0.0009220445343486785, "loss": 2.2148, "step": 447},
    {"epoch": 0.21, "learning_rate": 0.0009216458345355217, "loss": 2.1602, "step": 448},
    {"epoch": 0.21, "learning_rate": 0.0009212462044144061, "loss": 2.0742, "step": 449},
    {"epoch": 0.21, "learning_rate": 0.0009208456448670648, "loss": 2.0586, "step": 450},
    {"epoch": 0.21, "learning_rate": 0.0009204441567772816, "loss": 2.1992, "step": 451},
    {"epoch": 0.21, "learning_rate": 0.0009200417410308888, "loss": 2.1953, "step": 452},
    {"epoch": 0.21, "learning_rate": 0.0009196383985157656, "loss": 2.1602, "step": 453},
    {"epoch": 0.21, "learning_rate": 0.000919234130121836, "loss": 2.1875, "step": 454},
    {"epoch": 0.21, "learning_rate": 0.0009188289367410672, "loss": 2.0977, "step": 455},
    {"epoch": 0.21, "learning_rate": 0.0009184228192674666, "loss": 2.0898, "step": 456},
    {"epoch": 0.21, "learning_rate": 0.0009180157785970808, "loss": 2.1914, "step": 457},
    {"epoch": 0.21, "learning_rate": 0.0009176078156279932, "loss": 2.1719, "step": 458},
    {"epoch": 0.21, "learning_rate": 0.0009171989312603226, "loss": 2.1836, "step": 459},
    {"epoch": 0.21, "learning_rate": 0.0009167891263962202, "loss": 2.1133, "step": 460},
    {"epoch": 0.21, "learning_rate": 0.0009163784019398685, "loss": 2.2617, "step": 461},
    {"epoch": 0.21, "learning_rate": 0.0009159667587974785, "loss": 2.0957, "step": 462},
    {"epoch": 0.21, "learning_rate": 0.0009155541978772887, "loss": 2.1152, "step": 463},
    {"epoch": 0.21, "learning_rate": 0.0009151407200895625, "loss": 2.1836, "step": 464},
    {"epoch": 0.21, "learning_rate": 0.000914726326346586, "loss": 2.1719, "step": 465},
    {"epoch": 0.21, "learning_rate": 0.0009143110175626661, "loss": 2.1445, "step": 466},
    {"epoch": 0.21, "learning_rate": 0.0009138947946541291, "loss": 2.1602, "step": 467},
    {"epoch": 0.21, "learning_rate": 0.0009134776585393181, "loss": 2.0547, "step": 468},
    {"epoch": 0.22, "learning_rate": 0.0009130596101385906, "loss": 2.2383, "step": 469},
    {"epoch": 0.22, "learning_rate": 0.0009126406503743174, "loss": 2.1953, "step": 470},
    {"epoch": 0.22, "learning_rate": 0.0009122207801708802, "loss": 2.1133, "step": 471},
    {"epoch": 0.22, "learning_rate": 0.0009118000004546689, "loss": 2.0879, "step": 472},
    {"epoch": 0.22, "learning_rate": 0.0009113783121540807, "loss": 2.1484, "step": 473},
    {"epoch": 0.22, "learning_rate": 0.0009109557161995172, "loss": 2.2227, "step": 474},
    {"epoch": 0.22, "learning_rate": 0.0009105322135233828, "loss": 2.1172, "step": 475},
    {"epoch": 0.22, "learning_rate": 0.0009101078050600821, "loss": 2.2305, "step": 476},
    {"epoch": 0.22, "learning_rate": 0.0009096824917460186, "loss": 2.1602, "step": 477},
    {"epoch": 0.22, "learning_rate": 0.0009092562745195921, "loss": 2.2109, "step": 478},
    {"epoch": 0.22, "learning_rate": 0.0009088291543211967, "loss": 2.1172, "step": 479},
    {"epoch": 0.22, "learning_rate": 0.0009084011320932188, "loss": 2.1445, "step": 480},
    {"epoch": 0.22, "learning_rate": 0.0009079722087800352, "loss": 2.0781, "step": 481},
    {"epoch": 0.22, "learning_rate": 0.0009075423853280106, "loss": 2.1367, "step": 482},
    {"epoch": 0.22, "learning_rate": 0.0009071116626854958, "loss": 2.2266, "step": 483},
    {"epoch": 0.22, "learning_rate": 0.0009066800418028256, "loss": 2.0859, "step": 484},
    {"epoch": 0.22, "learning_rate": 0.0009062475236323168, "loss": 2.1328, "step": 485},
    {"epoch": 0.22, "learning_rate": 0.0009058141091282656, "loss": 2.0898, "step": 486},
    {"epoch": 0.22, "learning_rate": 0.0009053797992469461, "loss": 2.2656, "step": 487},
    {"epoch": 0.22, "learning_rate": 0.0009049445949466078, "loss": 2.1914, "step": 488},
    {"epoch": 0.22, "learning_rate": 0.0009045084971874737, "loss": 2.0957, "step": 489},
    {"epoch": 0.22, "learning_rate": 0.0009040715069317382, "loss": 2.1074, "step": 490},
    {"epoch": 0.23, "learning_rate": 0.0009036336251435648, "loss": 2.1484, "step": 491},
    {"epoch": 0.23, "learning_rate": 0.0009031948527890839, "loss": 2.2344, "step": 492},
    {"epoch": 0.23, "learning_rate": 0.000902755190836391, "loss": 2.1484, "step": 493},
    {"epoch": 0.23, "learning_rate": 0.0009023146402555442, "loss": 2.2812, "step": 494},
    {"epoch": 0.23, "learning_rate": 0.0009018732020185624, "loss": 2.1719, "step": 495},
    {"epoch": 0.23, "learning_rate": 0.0009014308770994235, "loss": 2.1016, "step": 496},
    {"epoch": 0.23, "learning_rate": 0.0009009876664740605, "loss": 2.1367, "step": 497},
    {"epoch": 0.23, "learning_rate": 0.0009005435711203618, "loss": 2.1406, "step": 498},
    {"epoch": 0.23, "learning_rate": 0.000900098592018167, "loss": 2.0762, "step": 499},
    {"epoch": 0.23, "learning_rate": 0.0008996527301492663, "loss": 2.1719, "step": 500},
    {"epoch": 0.23, "learning_rate": 0.0008992059864973972, "loss": 2.0859, "step": 501},
    {"epoch": 0.23, "learning_rate": 0.0008987583620482427, "loss": 2.1641, "step": 502},
    {"epoch": 0.23, "learning_rate": 0.0008983098577894292, "loss": 2.1523, "step": 503},
    {"epoch": 0.23, "learning_rate": 0.0008978604747105246, "loss": 2.1055, "step": 504},
    {"epoch": 0.23, "learning_rate": 0.0008974102138030354, "loss": 2.1523, "step": 505},
    {"epoch": 0.23, "learning_rate": 0.000896959076060405, "loss": 2.1367, "step": 506},
    {"epoch": 0.23, "learning_rate": 0.0008965070624780116, "loss": 2.084, "step": 507},
    {"epoch": 0.23, "learning_rate": 0.0008960541740531658, "loss": 2.1875, "step": 508},
    {"epoch": 0.23, "learning_rate": 0.0008956004117851083, "loss": 2.168, "step": 509},
    {"epoch": 0.23, "learning_rate": 0.0008951457766750079, "loss": 2.1602, "step": 510},
    {"epoch": 0.23, "learning_rate": 0.0008946902697259593, "loss": 2.1172, "step": 511},
    {"epoch": 0.23, "learning_rate": 0.0008942338919429805, "loss": 2.1367, "step": 512},
    {"epoch": 0.24, "learning_rate": 0.0008937766443330113, "loss": 2.1016, "step": 513},
    {"epoch": 0.24, "learning_rate": 0.0008933185279049103, "loss": 2.2344, "step": 514},
    {"epoch": 0.24, "learning_rate": 0.0008928595436694532, "loss": 2.1914, "step": 515},
    {"epoch": 0.24, "learning_rate": 0.0008923996926393305, "loss": 2.0586, "step": 516},
    {"epoch": 0.24, "learning_rate": 0.0008919389758291449, "loss": 2.1641, "step": 517},
    {"epoch": 0.24, "learning_rate": 0.0008914773942554098, "loss": 2.1562, "step": 518},
    {"epoch": 0.24, "learning_rate": 0.000891014948936546, "loss": 2.2305, "step": 519},
    {"epoch": 0.24, "learning_rate": 0.0008905516408928804, "loss": 2.2344, "step": 520},
    {"epoch": 0.24, "learning_rate": 0.0008900874711466434, "loss": 2.1016, "step": 521},
    {"epoch": 0.24, "learning_rate": 0.0008896224407219666, "loss": 2.2148, "step": 522},
    {"epoch": 0.24, "learning_rate": 0.0008891565506448804, "loss": 2.2227, "step": 523},
    {"epoch": 0.24, "learning_rate": 0.0008886898019433122, "loss": 2.0508, "step": 524},
    {"epoch": 0.24, "learning_rate": 0.0008882221956470836, "loss": 2.1719, "step": 525},
    {"epoch": 0.24, "learning_rate": 0.0008877537327879086, "loss": 2.168, "step": 526},
    {"epoch": 0.24, "learning_rate": 0.0008872844143993908, "loss": 2.1094, "step": 527},
    {"epoch": 0.24, "learning_rate": 0.0008868142415170218, "loss": 2.1641, "step": 528},
    {"epoch": 0.24, "learning_rate": 0.0008863432151781781, "loss": 2.2031, "step": 529},
    {"epoch": 0.24, "learning_rate": 0.0008858713364221195, "loss": 2.1191, "step": 530},
    {"epoch": 0.24, "learning_rate": 0.0008853986062899868, "loss": 2.1289, "step": 531},
    {"epoch": 0.24, "learning_rate": 0.0008849250258247986, "loss": 2.1602, "step": 532},
    {"epoch": 0.24, "learning_rate": 0.0008844505960714503, "loss": 2.0859, "step": 533},
    {"epoch": 0.24, "learning_rate": 0.0008839753180767108, "loss": 2.1055, "step": 534},
    {"epoch": 0.25, "learning_rate": 0.0008834991928892204, "loss": 2.2148, "step": 535},
    {"epoch": 0.25, "learning_rate": 0.000883022221559489, "loss": 2.1328, "step": 536},
    {"epoch": 0.25, "learning_rate": 0.0008825444051398934, "loss": 2.1445, "step": 537},
    {"epoch": 0.25, "learning_rate": 0.0008820657446846745, "loss": 2.1953, "step": 538},
    {"epoch": 0.25, "learning_rate": 0.000881586241249936, "loss": 2.1953, "step": 539},
    {"epoch": 0.25, "learning_rate": 0.0008811058958936411, "loss": 2.168, "step": 540
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.000880624709675611,
|
|
"loss": 2.1445,
|
|
"step": 541
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.000880142683657522,
|
|
"loss": 2.1562,
|
|
"step": 542
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008796598189029029,
|
|
"loss": 2.0898,
|
|
"step": 543
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008791761164771338,
|
|
"loss": 2.1211,
|
|
"step": 544
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008786915774474424,
|
|
"loss": 2.0625,
|
|
"step": 545
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008782062028829027,
|
|
"loss": 2.1055,
|
|
"step": 546
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008777199938544318,
|
|
"loss": 2.1328,
|
|
"step": 547
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008772329514347883,
|
|
"loss": 2.0352,
|
|
"step": 548
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008767450766985694,
|
|
"loss": 2.1562,
|
|
"step": 549
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008762563707222086,
|
|
"loss": 2.1797,
|
|
"step": 550
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008757668345839738,
|
|
"loss": 2.1992,
|
|
"step": 551
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008752764693639638,
|
|
"loss": 2.2266,
|
|
"step": 552
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008747852761441078,
|
|
"loss": 2.0742,
|
|
"step": 553
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008742932560081607,
|
|
"loss": 2.1133,
|
|
"step": 554
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008738004100417025,
|
|
"loss": 2.0859,
|
|
"step": 555
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0008733067393321355,
|
|
"loss": 2.1875,
|
|
"step": 556
|
|
},
|
|
{
|
|
"epoch": 0.26,
|
|
"learning_rate": 0.000872812244968681,
|
|
"loss": 2.1719,
|
|
"step": 557
|
|
},
|
|
{
"epoch": 0.26,
"learning_rate": 0.0008723169280423783,
"loss": 2.2227,
"step": 558
},
{
"epoch": 0.26,
"learning_rate": 0.0008718207896460811,
"loss": 2.1836,
"step": 559
},
{
"epoch": 0.26,
"learning_rate": 0.0008713238308744557,
"loss": 2.25,
"step": 560
},
{
"epoch": 0.26,
"learning_rate": 0.0008708260528239789,
"loss": 2.0801,
"step": 561
},
{
"epoch": 0.26,
"learning_rate": 0.000870327456592934,
"loss": 2.1641,
"step": 562
},
{
"epoch": 0.26,
"learning_rate": 0.0008698280432814107,
"loss": 2.1484,
"step": 563
},
{
"epoch": 0.26,
"learning_rate": 0.000869327813991301,
"loss": 2.1836,
"step": 564
},
{
"epoch": 0.26,
"learning_rate": 0.0008688267698262971,
"loss": 2.1719,
"step": 565
},
{
"epoch": 0.26,
"learning_rate": 0.0008683249118918894,
"loss": 2.1211,
"step": 566
},
{
"epoch": 0.26,
"learning_rate": 0.0008678222412953637,
"loss": 2.1797,
"step": 567
},
{
"epoch": 0.26,
"learning_rate": 0.0008673187591457987,
"loss": 2.2812,
"step": 568
},
{
"epoch": 0.26,
"learning_rate": 0.0008668144665540639,
"loss": 2.1484,
"step": 569
},
{
"epoch": 0.26,
"learning_rate": 0.0008663093646328167,
"loss": 2.1562,
"step": 570
},
{
"epoch": 0.26,
"learning_rate": 0.0008658034544965003,
"loss": 2.1289,
"step": 571
},
{
"epoch": 0.26,
"learning_rate": 0.0008652967372613412,
"loss": 2.1562,
"step": 572
},
{
"epoch": 0.26,
"learning_rate": 0.0008647892140453466,
"loss": 2.1641,
"step": 573
},
{
"epoch": 0.26,
"learning_rate": 0.0008642808859683021,
"loss": 2.082,
"step": 574
},
{
"epoch": 0.26,
"learning_rate": 0.0008637717541517689,
"loss": 2.1836,
"step": 575
},
{
"epoch": 0.26,
"learning_rate": 0.0008632618197190816,
"loss": 2.1328,
"step": 576
},
{
"epoch": 0.26,
"learning_rate": 0.0008627510837953458,
"loss": 2.1758,
"step": 577
},
{
"epoch": 0.27,
"learning_rate": 0.0008622395475074355,
"loss": 2.127,
"step": 578
},
{
"epoch": 0.27,
"learning_rate": 0.0008617272119839903,
"loss": 2.1523,
"step": 579
},
{
"epoch": 0.27,
"learning_rate": 0.0008612140783554136,
"loss": 2.1367,
"step": 580
},
{
"epoch": 0.27,
"learning_rate": 0.0008607001477538696,
"loss": 2.1445,
"step": 581
},
{
"epoch": 0.27,
"learning_rate": 0.0008601854213132807,
"loss": 2.2344,
"step": 582
},
{
"epoch": 0.27,
"learning_rate": 0.0008596699001693256,
"loss": 2.0898,
"step": 583
},
{
"epoch": 0.27,
"learning_rate": 0.000859153585459436,
"loss": 2.1289,
"step": 584
},
{
"epoch": 0.27,
"learning_rate": 0.0008586364783227949,
"loss": 2.082,
"step": 585
},
{
"epoch": 0.27,
"learning_rate": 0.0008581185799003332,
"loss": 2.1445,
"step": 586
},
{
"epoch": 0.27,
"learning_rate": 0.0008575998913347283,
"loss": 2.1211,
"step": 587
},
{
"epoch": 0.27,
"learning_rate": 0.0008570804137704004,
"loss": 2.0723,
"step": 588
},
{
"epoch": 0.27,
"learning_rate": 0.0008565601483535108,
"loss": 2.0508,
"step": 589
},
{
"epoch": 0.27,
"learning_rate": 0.0008560390962319591,
"loss": 2.0723,
"step": 590
},
{
"epoch": 0.27,
"learning_rate": 0.0008555172585553804,
"loss": 2.168,
"step": 591
},
{
"epoch": 0.27,
"learning_rate": 0.0008549946364751435,
"loss": 2.1172,
"step": 592
},
{
"epoch": 0.27,
"learning_rate": 0.0008544712311443475,
"loss": 2.1641,
"step": 593
},
{
"epoch": 0.27,
"learning_rate": 0.0008539470437178196,
"loss": 2.127,
"step": 594
},
{
"epoch": 0.27,
"learning_rate": 0.000853422075352113,
"loss": 2.1484,
"step": 595
},
{
"epoch": 0.27,
"learning_rate": 0.0008528963272055035,
"loss": 2.1367,
"step": 596
},
{
"epoch": 0.27,
"learning_rate": 0.0008523698004379877,
"loss": 2.1094,
"step": 597
},
{
"epoch": 0.27,
"learning_rate": 0.00085184249621128,
"loss": 2.2344,
"step": 598
},
{
"epoch": 0.27,
"learning_rate": 0.0008513144156888101,
"loss": 2.207,
"step": 599
},
{
"epoch": 0.28,
"learning_rate": 0.0008507855600357207,
"loss": 2.2109,
"step": 600
},
{
"epoch": 0.28,
"learning_rate": 0.0008502559304188644,
"loss": 2.1875,
"step": 601
},
{
"epoch": 0.28,
"learning_rate": 0.0008497255280068019,
"loss": 2.1797,
"step": 602
},
{
"epoch": 0.28,
"learning_rate": 0.0008491943539697986,
"loss": 2.0645,
"step": 603
},
{
"epoch": 0.28,
"learning_rate": 0.0008486624094798226,
"loss": 2.0957,
"step": 604
},
{
"epoch": 0.28,
"learning_rate": 0.0008481296957105417,
"loss": 2.1641,
"step": 605
},
{
"epoch": 0.28,
"learning_rate": 0.0008475962138373213,
"loss": 2.1426,
"step": 606
},
{
"epoch": 0.28,
"learning_rate": 0.0008470619650372211,
"loss": 2.1445,
"step": 607
},
{
"epoch": 0.28,
"learning_rate": 0.0008465269504889934,
"loss": 2.1738,
"step": 608
},
{
"epoch": 0.28,
"learning_rate": 0.0008459911713730799,
"loss": 2.1191,
"step": 609
},
{
"epoch": 0.28,
"learning_rate": 0.0008454546288716089,
"loss": 2.2266,
"step": 610
},
{
"epoch": 0.28,
"learning_rate": 0.0008449173241683935,
"loss": 2.1133,
"step": 611
},
{
"epoch": 0.28,
"learning_rate": 0.0008443792584489281,
"loss": 2.1562,
"step": 612
},
{
"epoch": 0.28,
"learning_rate": 0.0008438404329003863,
"loss": 2.1562,
"step": 613
},
{
"epoch": 0.28,
"learning_rate": 0.0008433008487116183,
"loss": 2.1758,
"step": 614
},
{
"epoch": 0.28,
"learning_rate": 0.0008427605070731481,
"loss": 2.0664,
"step": 615
},
{
"epoch": 0.28,
"learning_rate": 0.0008422194091771708,
"loss": 2.1797,
"step": 616
},
{
"epoch": 0.28,
"learning_rate": 0.0008416775562175503,
"loss": 2.0508,
"step": 617
},
{
"epoch": 0.28,
"learning_rate": 0.000841134949389816,
"loss": 2.1836,
"step": 618
},
{
"epoch": 0.28,
"learning_rate": 0.0008405915898911611,
"loss": 2.1328,
"step": 619
},
{
"epoch": 0.28,
"learning_rate": 0.0008400474789204396,
"loss": 2.1328,
"step": 620
},
{
"epoch": 0.28,
"learning_rate": 0.0008395026176781626,
"loss": 2.168,
"step": 621
},
{
"epoch": 0.29,
"learning_rate": 0.0008389570073664976,
"loss": 2.1445,
"step": 622
},
{
"epoch": 0.29,
"learning_rate": 0.0008384106491892642,
"loss": 2.1211,
"step": 623
},
{
"epoch": 0.29,
"learning_rate": 0.0008378635443519327,
"loss": 2.125,
"step": 624
},
{
"epoch": 0.29,
"learning_rate": 0.0008373156940616199,
"loss": 2.1836,
"step": 625
},
{
"epoch": 0.29,
"learning_rate": 0.0008367670995270882,
"loss": 2.1719,
"step": 626
},
{
"epoch": 0.29,
"learning_rate": 0.0008362177619587416,
"loss": 2.1367,
"step": 627
},
{
"epoch": 0.29,
"learning_rate": 0.0008356676825686238,
"loss": 2.1641,
"step": 628
},
{
"epoch": 0.29,
"learning_rate": 0.0008351168625704147,
"loss": 2.1602,
"step": 629
},
{
"epoch": 0.29,
"learning_rate": 0.0008345653031794292,
"loss": 2.1602,
"step": 630
},
{
"epoch": 0.29,
"learning_rate": 0.0008340130056126125,
"loss": 2.168,
"step": 631
},
{
"epoch": 0.29,
"learning_rate": 0.0008334599710885394,
"loss": 2.082,
"step": 632
},
{
"epoch": 0.29,
"learning_rate": 0.0008329062008274098,
"loss": 2.0723,
"step": 633
},
{
"epoch": 0.29,
"learning_rate": 0.000832351696051048,
"loss": 2.207,
"step": 634
},
{
"epoch": 0.29,
"learning_rate": 0.000831796457982898,
"loss": 2.1602,
"step": 635
},
{
"epoch": 0.29,
"learning_rate": 0.0008312404878480222,
"loss": 2.1445,
"step": 636
},
{
"epoch": 0.29,
"learning_rate": 0.0008306837868730979,
"loss": 2.1953,
"step": 637
},
{
"epoch": 0.29,
"learning_rate": 0.0008301263562864152,
"loss": 2.1152,
"step": 638
},
|
|
{
"epoch": 0.29,
"learning_rate": 0.0008295681973178737,
"loss": 2.0977,
"step": 639
},
{
"epoch": 0.29,
"learning_rate": 0.0008290093111989804,
"loss": 2.1953,
"step": 640
},
{
"epoch": 0.29,
"learning_rate": 0.0008284496991628465,
"loss": 2.0977,
"step": 641
},
{
"epoch": 0.29,
"learning_rate": 0.0008278893624441847,
"loss": 2.0859,
"step": 642
},
{
"epoch": 0.29,
"learning_rate": 0.000827328302279307,
"loss": 2.1836,
"step": 643
},
{
"epoch": 0.3,
"learning_rate": 0.0008267665199061211,
"loss": 1.9844,
"step": 644
},
{
"epoch": 0.3,
"learning_rate": 0.0008262040165641288,
"loss": 2.168,
"step": 645
},
{
"epoch": 0.3,
"learning_rate": 0.0008256407934944219,
"loss": 2.1484,
"step": 646
},
{
"epoch": 0.3,
"learning_rate": 0.0008250768519396807,
"loss": 2.1719,
"step": 647
},
{
"epoch": 0.3,
"learning_rate": 0.0008245121931441706,
"loss": 2.1309,
"step": 648
},
{
"epoch": 0.3,
"learning_rate": 0.0008239468183537393,
"loss": 2.1562,
"step": 649
},
{
"epoch": 0.3,
"learning_rate": 0.0008233807288158146,
"loss": 2.0859,
"step": 650
},
{
"epoch": 0.3,
"learning_rate": 0.0008228139257794012,
"loss": 2.1562,
"step": 651
},
{
"epoch": 0.3,
"learning_rate": 0.0008222464104950778,
"loss": 2.0566,
"step": 652
},
{
"epoch": 0.3,
"learning_rate": 0.000821678184214995,
"loss": 2.1152,
"step": 653
},
{
"epoch": 0.3,
"learning_rate": 0.0008211092481928716,
"loss": 2.1016,
"step": 654
},
{
"epoch": 0.3,
"learning_rate": 0.0008205396036839927,
"loss": 2.1582,
"step": 655
},
{
"epoch": 0.3,
"learning_rate": 0.0008199692519452069,
"loss": 2.168,
"step": 656
},
{
"epoch": 0.3,
"learning_rate": 0.0008193981942349224,
"loss": 2.1367,
"step": 657
},
{
"epoch": 0.3,
"learning_rate": 0.0008188264318131056,
"loss": 2.1406,
"step": 658
},
{
"epoch": 0.3,
"learning_rate": 0.0008182539659412776,
"loss": 2.0645,
"step": 659
},
{
"epoch": 0.3,
"learning_rate": 0.0008176807978825118,
"loss": 2.0801,
"step": 660
},
{
"epoch": 0.3,
"learning_rate": 0.0008171069289014306,
"loss": 2.207,
"step": 661
},
{
"epoch": 0.3,
"learning_rate": 0.0008165323602642028,
"loss": 2.1797,
"step": 662
},
{
"epoch": 0.3,
"learning_rate": 0.0008159570932385414,
"loss": 2.0625,
"step": 663
},
{
"epoch": 0.3,
"learning_rate": 0.0008153811290936999,
"loss": 2.0898,
"step": 664
},
{
"epoch": 0.3,
"learning_rate": 0.0008148044691004698,
"loss": 2.0625,
"step": 665
},
{
"epoch": 0.31,
"learning_rate": 0.0008142271145311783,
"loss": 2.1133,
"step": 666
},
{
"epoch": 0.31,
"learning_rate": 0.000813649066659685,
"loss": 2.1758,
"step": 667
},
{
"epoch": 0.31,
"learning_rate": 0.0008130703267613787,
"loss": 2.1484,
"step": 668
},
{
"epoch": 0.31,
"learning_rate": 0.0008124908961131759,
"loss": 2.125,
"step": 669
},
{
"epoch": 0.31,
"learning_rate": 0.0008119107759935163,
"loss": 2.0117,
"step": 670
},
{
"epoch": 0.31,
"learning_rate": 0.0008113299676823615,
"loss": 2.1055,
"step": 671
},
{
"epoch": 0.31,
"learning_rate": 0.0008107484724611911,
"loss": 2.2148,
"step": 672
},
{
"epoch": 0.31,
"learning_rate": 0.0008101662916130006,
"loss": 2.1445,
"step": 673
},
{
"epoch": 0.31,
"learning_rate": 0.0008095834264222979,
"loss": 2.1211,
"step": 674
},
{
"epoch": 0.31,
"learning_rate": 0.0008089998781751009,
"loss": 2.2031,
"step": 675
},
{
"epoch": 0.31,
"learning_rate": 0.0008084156481589349,
"loss": 2.1562,
"step": 676
},
{
"epoch": 0.31,
"learning_rate": 0.0008078307376628291,
"loss": 2.1797,
"step": 677
},
{
"epoch": 0.31,
"learning_rate": 0.0008072451479773143,
"loss": 2.0625,
"step": 678
},
{
"epoch": 0.31,
"learning_rate": 0.0008066588803944195,
"loss": 2.2031,
"step": 679
},
{
"epoch": 0.31,
"learning_rate": 0.0008060719362076697,
"loss": 2.1523,
"step": 680
},
{
"epoch": 0.31,
"learning_rate": 0.0008054843167120826,
"loss": 2.2148,
"step": 681
},
{
"epoch": 0.31,
"learning_rate": 0.0008048960232041663,
"loss": 2.1094,
"step": 682
},
{
"epoch": 0.31,
"learning_rate": 0.0008043070569819153,
"loss": 2.0859,
"step": 683
},
{
"epoch": 0.31,
"learning_rate": 0.0008037174193448089,
"loss": 2.1758,
"step": 684
},
{
"epoch": 0.31,
"learning_rate": 0.0008031271115938077,
"loss": 2.0645,
"step": 685
},
{
"epoch": 0.31,
"learning_rate": 0.0008025361350313505,
"loss": 2.0703,
"step": 686
},
{
"epoch": 0.31,
"learning_rate": 0.0008019444909613523,
"loss": 2.2148,
"step": 687
},
{
"epoch": 0.32,
"learning_rate": 0.0008013521806892003,
"loss": 2.1367,
"step": 688
},
{
"epoch": 0.32,
"learning_rate": 0.000800759205521752,
"loss": 2.0625,
"step": 689
},
{
"epoch": 0.32,
"learning_rate": 0.0008001655667673318,
"loss": 2.2344,
"step": 690
},
{
"epoch": 0.32,
"learning_rate": 0.0007995712657357279,
"loss": 2.1602,
"step": 691
},
{
"epoch": 0.32,
"learning_rate": 0.0007989763037381904,
"loss": 2.1074,
"step": 692
},
{
"epoch": 0.32,
"learning_rate": 0.0007983806820874271,
"loss": 2.0898,
"step": 693
},
{
"epoch": 0.32,
"learning_rate": 0.0007977844020976016,
"loss": 2.2109,
"step": 694
},
{
"epoch": 0.32,
"learning_rate": 0.00079718746508433,
"loss": 2.1875,
"step": 695
},
{
"epoch": 0.32,
"learning_rate": 0.0007965898723646776,
"loss": 2.1602,
"step": 696
},
{
"epoch": 0.32,
"learning_rate": 0.0007959916252571573,
"loss": 2.084,
"step": 697
},
{
"epoch": 0.32,
"learning_rate": 0.000795392725081725,
"loss": 2.1172,
"step": 698
},
{
"epoch": 0.32,
"learning_rate": 0.000794793173159778,
"loss": 2.1953,
"step": 699
},
{
"epoch": 0.32,
"learning_rate": 0.0007941929708141513,
"loss": 2.2109,
"step": 700
},
{
"epoch": 0.32,
"learning_rate": 0.0007935921193691153,
"loss": 2.1504,
"step": 701
},
{
"epoch": 0.32,
"learning_rate": 0.0007929906201503722,
"loss": 2.1445,
"step": 702
},
{
"epoch": 0.32,
"learning_rate": 0.0007923884744850536,
"loss": 2.0234,
"step": 703
},
{
"epoch": 0.32,
"learning_rate": 0.0007917856837017176,
"loss": 2.1875,
"step": 704
},
{
"epoch": 0.32,
"learning_rate": 0.0007911822491303452,
"loss": 2.0352,
"step": 705
},
{
"epoch": 0.32,
"learning_rate": 0.0007905781721023382,
"loss": 2.0332,
"step": 706
},
{
"epoch": 0.32,
"learning_rate": 0.000789973453950516,
"loss": 2.1523,
"step": 707
},
{
"epoch": 0.32,
"learning_rate": 0.000789368096009112,
"loss": 2.1465,
"step": 708
},
{
"epoch": 0.33,
"learning_rate": 0.0007887620996137721,
"loss": 2.1445,
"step": 709
},
{
"epoch": 0.33,
"learning_rate": 0.0007881554661015497,
"loss": 2.2031,
"step": 710
},
{
"epoch": 0.33,
"learning_rate": 0.0007875481968109051,
"loss": 2.168,
"step": 711
},
{
"epoch": 0.33,
"learning_rate": 0.0007869402930817007,
"loss": 2.1172,
"step": 712
},
{
"epoch": 0.33,
"learning_rate": 0.0007863317562551987,
"loss": 2.1406,
"step": 713
},
{
"epoch": 0.33,
"learning_rate": 0.0007857225876740584,
"loss": 2.0547,
"step": 714
},
{
"epoch": 0.33,
"learning_rate": 0.0007851127886823327,
"loss": 2.2227,
"step": 715
},
{
"epoch": 0.33,
"learning_rate": 0.0007845023606254658,
"loss": 2.0879,
"step": 716
},
{
"epoch": 0.33,
"learning_rate": 0.0007838913048502894,
"loss": 2.0703,
"step": 717
},
{
"epoch": 0.33,
"learning_rate": 0.0007832796227050208,
"loss": 2.1758,
"step": 718
},
{
"epoch": 0.33,
"learning_rate": 0.0007826673155392587,
"loss": 2.1172,
"step": 719
},
|
|
{
"epoch": 0.33,
"learning_rate": 0.000782054384703981,
"loss": 2.0898,
"step": 720
},
{
"epoch": 0.33,
"learning_rate": 0.0007814408315515418,
"loss": 2.0527,
"step": 721
},
{
"epoch": 0.33,
"learning_rate": 0.0007808266574356683,
"loss": 2.1797,
"step": 722
},
{
"epoch": 0.33,
"learning_rate": 0.0007802118637114573,
"loss": 2.1289,
"step": 723
},
{
"epoch": 0.33,
"learning_rate": 0.0007795964517353734,
"loss": 2.084,
"step": 724
},
{
"epoch": 0.33,
"learning_rate": 0.0007789804228652449,
"loss": 2.0488,
"step": 725
},
{
"epoch": 0.33,
"learning_rate": 0.0007783637784602609,
"loss": 2.168,
"step": 726
},
{
"epoch": 0.33,
"learning_rate": 0.0007777465198809692,
"loss": 2.1211,
"step": 727
},
{
"epoch": 0.33,
"learning_rate": 0.0007771286484892722,
"loss": 2.1797,
"step": 728
},
{
"epoch": 0.33,
"learning_rate": 0.000776510165648425,
"loss": 2.1328,
"step": 729
},
{
"epoch": 0.33,
"learning_rate": 0.0007758910727230311,
"loss": 2.1133,
"step": 730
},
{
"epoch": 0.34,
"learning_rate": 0.0007752713710790404,
"loss": 2.1914,
"step": 731
},
{
"epoch": 0.34,
"learning_rate": 0.0007746510620837459,
"loss": 2.0508,
"step": 732
},
{
"epoch": 0.34,
"learning_rate": 0.0007740301471057807,
"loss": 2.1328,
"step": 733
},
{
"epoch": 0.34,
"learning_rate": 0.0007734086275151146,
"loss": 2.0918,
"step": 734
},
{
"epoch": 0.34,
"learning_rate": 0.0007727865046830517,
"loss": 2.1484,
"step": 735
},
{
"epoch": 0.34,
"learning_rate": 0.0007721637799822269,
"loss": 2.0645,
"step": 736
},
{
"epoch": 0.34,
"learning_rate": 0.0007715404547866032,
"loss": 2.0781,
"step": 737
},
{
"epoch": 0.34,
"learning_rate": 0.0007709165304714685,
"loss": 2.1152,
"step": 738
},
{
"epoch": 0.34,
"learning_rate": 0.0007702920084134324,
"loss": 2.1113,
"step": 739
},
{
"epoch": 0.34,
"learning_rate": 0.0007696668899904236,
"loss": 2.1719,
"step": 740
},
{
"epoch": 0.34,
"learning_rate": 0.0007690411765816864,
"loss": 2.2305,
"step": 741
},
{
"epoch": 0.34,
"learning_rate": 0.0007684148695677778,
"loss": 2.1289,
"step": 742
},
{
"epoch": 0.34,
"learning_rate": 0.000767787970330565,
"loss": 2.1523,
"step": 743
},
{
"epoch": 0.34,
"learning_rate": 0.000767160480253221,
"loss": 2.1484,
"step": 744
},
{
"epoch": 0.34,
"learning_rate": 0.0007665324007202235,
"loss": 2.1523,
"step": 745
},
{
"epoch": 0.34,
"learning_rate": 0.0007659037331173498,
"loss": 2.125,
"step": 746
},
{
"epoch": 0.34,
"learning_rate": 0.0007652744788316752,
"loss": 2.043,
"step": 747
},
{
"epoch": 0.34,
"learning_rate": 0.0007646446392515692,
"loss": 2.1289,
"step": 748
},
{
"epoch": 0.34,
"learning_rate": 0.000764014215766693,
"loss": 2.1445,
"step": 749
},
{
"epoch": 0.34,
"learning_rate": 0.0007633832097679958,
"loss": 2.1016,
"step": 750
},
{
"epoch": 0.34,
"learning_rate": 0.0007627516226477122,
"loss": 2.084,
"step": 751
},
{
"epoch": 0.34,
"learning_rate": 0.0007621194557993589,
"loss": 2.168,
"step": 752
},
{
"epoch": 0.35,
"learning_rate": 0.0007614867106177319,
"loss": 2.0684,
"step": 753
},
{
"epoch": 0.35,
"learning_rate": 0.0007608533884989029,
"loss": 2.1641,
"step": 754
},
{
"epoch": 0.35,
"learning_rate": 0.0007602194908402166,
"loss": 2.1172,
"step": 755
},
{
"epoch": 0.35,
"learning_rate": 0.0007595850190402877,
"loss": 2.1172,
"step": 756
},
{
"epoch": 0.35,
"learning_rate": 0.0007589499744989976,
"loss": 2.1211,
"step": 757
},
{
"epoch": 0.35,
"learning_rate": 0.0007583143586174916,
"loss": 2.1758,
"step": 758
},
{
"epoch": 0.35,
"learning_rate": 0.000757678172798175,
"loss": 2.1367,
"step": 759
},
{
"epoch": 0.35,
"learning_rate": 0.0007570414184447112,
"loss": 2.1523,
"step": 760
},
{
"epoch": 0.35,
"learning_rate": 0.0007564040969620179,
"loss": 2.1328,
"step": 761
},
{
"epoch": 0.35,
"learning_rate": 0.0007557662097562636,
"loss": 2.207,
"step": 762
},
{
"epoch": 0.35,
"learning_rate": 0.0007551277582348658,
"loss": 2.0488,
"step": 763
},
{
"epoch": 0.35,
"learning_rate": 0.0007544887438064862,
"loss": 2.2109,
"step": 764
},
{
"epoch": 0.35,
"learning_rate": 0.0007538491678810294,
"loss": 2.1523,
"step": 765
},
{
"epoch": 0.35,
"learning_rate": 0.0007532090318696381,
"loss": 2.0898,
"step": 766
},
{
"epoch": 0.35,
"learning_rate": 0.0007525683371846913,
"loss": 2.1328,
"step": 767
},
{
"epoch": 0.35,
"learning_rate": 0.0007519270852398001,
"loss": 2.123,
"step": 768
},
{
"epoch": 0.35,
"learning_rate": 0.000751285277449806,
"loss": 2.082,
"step": 769
},
{
"epoch": 0.35,
"learning_rate": 0.0007506429152307756,
"loss": 2.1387,
"step": 770
},
{
"epoch": 0.35,
"learning_rate": 0.00075,
"loss": 2.1445,
"step": 771
},
{
"epoch": 0.35,
"learning_rate": 0.00074935653317599,
"loss": 2.1094,
"step": 772
},
{
"epoch": 0.35,
"learning_rate": 0.000748712516178473,
"loss": 2.125,
"step": 773
},
{
"epoch": 0.35,
"learning_rate": 0.0007480679504283911,
"loss": 2.1719,
"step": 774
},
{
"epoch": 0.36,
"learning_rate": 0.0007474228373478964,
"loss": 2.1562,
"step": 775
},
{
"epoch": 0.36,
"learning_rate": 0.0007467771783603492,
"loss": 2.2266,
"step": 776
},
{
"epoch": 0.36,
"learning_rate": 0.0007461309748903138,
"loss": 2.1055,
"step": 777
},
{
"epoch": 0.36,
"learning_rate": 0.0007454842283635562,
"loss": 2.1172,
"step": 778
},
{
"epoch": 0.36,
"learning_rate": 0.0007448369402070404,
"loss": 2.0977,
"step": 779
},
{
"epoch": 0.36,
"learning_rate": 0.0007441891118489254,
"loss": 2.1836,
"step": 780
},
{
"epoch": 0.36,
"learning_rate": 0.0007435407447185622,
"loss": 2.1445,
"step": 781
},
{
"epoch": 0.36,
"learning_rate": 0.0007428918402464908,
"loss": 2.1172,
"step": 782
},
{
"epoch": 0.36,
"learning_rate": 0.0007422423998644359,
"loss": 2.1562,
"step": 783
},
{
"epoch": 0.36,
"learning_rate": 0.0007415924250053055,
"loss": 2.123,
"step": 784
},
{
"epoch": 0.36,
"learning_rate": 0.0007409419171031865,
"loss": 2.1035,
"step": 785
},
{
"epoch": 0.36,
"learning_rate": 0.0007402908775933419,
"loss": 2.1172,
"step": 786
},
{
"epoch": 0.36,
"learning_rate": 0.0007396393079122077,
"loss": 2.0703,
"step": 787
},
{
"epoch": 0.36,
"learning_rate": 0.0007389872094973896,
"loss": 2.1211,
"step": 788
},
{
"epoch": 0.36,
"learning_rate": 0.00073833458378766,
"loss": 2.168,
"step": 789
},
{
"epoch": 0.36,
"learning_rate": 0.0007376814322229544,
"loss": 2.0586,
"step": 790
},
{
"epoch": 0.36,
"learning_rate": 0.0007370277562443688,
"loss": 2.1523,
"step": 791
},
{
"epoch": 0.36,
"learning_rate": 0.0007363735572941564,
"loss": 2.0977,
"step": 792
},
{
"epoch": 0.36,
"learning_rate": 0.0007357188368157236,
"loss": 2.0586,
"step": 793
},
{
"epoch": 0.36,
"learning_rate": 0.0007350635962536284,
"loss": 2.1523,
"step": 794
},
{
"epoch": 0.36,
"learning_rate": 0.0007344078370535756,
"loss": 2.0957,
"step": 795
},
{
"epoch": 0.36,
"learning_rate": 0.0007337515606624148,
"loss": 2.1758,
"step": 796
},
{
"epoch": 0.37,
"learning_rate": 0.0007330947685281362,
"loss": 2.1445,
"step": 797
},
{
"epoch": 0.37,
"learning_rate": 0.0007324374620998682,
"loss": 2.0293,
"step": 798
},
{
"epoch": 0.37,
"learning_rate": 0.000731779642827874,
"loss": 2.1445,
"step": 799
},
{
"epoch": 0.37,
"learning_rate": 0.0007311213121635483,
"loss": 2.0742,
"step": 800
},
|
|
{
"epoch": 0.37,
"learning_rate": 0.0007304624715594139,
"loss": 2.1055,
"step": 801
},
{
"epoch": 0.37,
"learning_rate": 0.0007298031224691193,
"loss": 2.2148,
"step": 802
},
{
"epoch": 0.37,
"learning_rate": 0.0007291432663474339,
"loss": 2.1289,
"step": 803
},
{
"epoch": 0.37,
"learning_rate": 0.0007284829046502467,
"loss": 2.1758,
"step": 804
},
{
"epoch": 0.37,
"learning_rate": 0.0007278220388345619,
"loss": 2.1172,
"step": 805
},
{
"epoch": 0.37,
"learning_rate": 0.0007271606703584958,
"loss": 1.9727,
"step": 806
},
{
"epoch": 0.37,
"learning_rate": 0.000726498800681274,
"loss": 2.0547,
"step": 807
},
{
"epoch": 0.37,
"learning_rate": 0.0007258364312632279,
"loss": 2.1211,
"step": 808
},
{
"epoch": 0.37,
"learning_rate": 0.0007251735635657915,
"loss": 2.1406,
"step": 809
},
{
"epoch": 0.37,
"learning_rate": 0.000724510199051498,
"loss": 2.1406,
"step": 810
},
{
"epoch": 0.37,
"learning_rate": 0.0007238463391839769,
"loss": 2.1953,
"step": 811
},
{
"epoch": 0.37,
"learning_rate": 0.0007231819854279508,
"loss": 2.1367,
"step": 812
},
{
"epoch": 0.37,
"learning_rate": 0.0007225171392492316,
"loss": 2.0625,
"step": 813
},
{
"epoch": 0.37,
"learning_rate": 0.0007218518021147182,
"loss": 2.125,
"step": 814
},
{
"epoch": 0.37,
"learning_rate": 0.0007211859754923923,
"loss": 2.1445,
"step": 815
},
{
"epoch": 0.37,
"learning_rate": 0.0007205196608513158,
"loss": 2.0371,
"step": 816
},
{
"epoch": 0.37,
"learning_rate": 0.0007198528596616272,
"loss": 2.1406,
"step": 817
},
{
"epoch": 0.38,
"learning_rate": 0.0007191855733945387,
"loss": 2.0273,
"step": 818
},
{
"epoch": 0.38,
"learning_rate": 0.0007185178035223327,
"loss": 2.0195,
"step": 819
},
{
"epoch": 0.38,
"learning_rate": 0.0007178495515183583,
"loss": 2.0098,
"step": 820
},
{
"epoch": 0.38,
"learning_rate": 0.000717180818857029,
"loss": 2.043,
"step": 821
},
{
"epoch": 0.38,
"learning_rate": 0.0007165116070138182,
"loss": 2.1211,
"step": 822
},
{
"epoch": 0.38,
"learning_rate": 0.0007158419174652569,
"loss": 2.0156,
"step": 823
},
{
"epoch": 0.38,
"learning_rate": 0.00071517175168893,
"loss": 2.0879,
"step": 824
},
{
"epoch": 0.38,
"learning_rate": 0.0007145011111634732,
"loss": 2.0645,
"step": 825
},
{
"epoch": 0.38,
"learning_rate": 0.0007138299973685694,
"loss": 2.1602,
"step": 826
},
{
"epoch": 0.38,
"learning_rate": 0.0007131584117849459,
"loss": 2.125,
"step": 827
},
{
"epoch": 0.38,
"learning_rate": 0.0007124863558943713,
"loss": 2.1445,
"step": 828
},
{
"epoch": 0.38,
"learning_rate": 0.0007118138311796514,
"loss": 2.1367,
"step": 829
},
{
"epoch": 0.38,
"learning_rate": 0.0007111408391246262,
"loss": 2.1348,
"step": 830
},
{
"epoch": 0.38,
"learning_rate": 0.0007104673812141675,
"loss": 2.043,
"step": 831
},
{
"epoch": 0.38,
"learning_rate": 0.0007097934589341745,
"loss": 2.0781,
"step": 832
},
{
"epoch": 0.38,
"learning_rate": 0.0007091190737715711,
"loss": 2.1289,
"step": 833
},
{
"epoch": 0.38,
"learning_rate": 0.0007084442272143026,
"loss": 2.125,
"step": 834
},
{
"epoch": 0.38,
"learning_rate": 0.000707768920751332,
"loss": 2.1211,
"step": 835
},
{
"epoch": 0.38,
"learning_rate": 0.0007070931558726373,
"loss": 2.1953,
"step": 836
},
{
"epoch": 0.38,
"learning_rate": 0.0007064169340692076,
"loss": 2.1719,
"step": 837
},
{
"epoch": 0.38,
"learning_rate": 0.0007057402568330407,
"loss": 2.0645,
"step": 838
},
{
"epoch": 0.38,
"learning_rate": 0.0007050631256571389,
"loss": 2.0938,
"step": 839
},
{
"epoch": 0.39,
"learning_rate": 0.000704385542035506,
"loss": 2.125,
"step": 840
},
{
"epoch": 0.39,
"learning_rate": 0.000703707507463144,
"loss": 2.1914,
"step": 841
},
{
"epoch": 0.39,
"learning_rate": 0.0007030290234360505,
"loss": 2.0801,
"step": 842
},
{
"epoch": 0.39,
"learning_rate": 0.0007023500914512139,
"loss": 2.0898,
"step": 843
},
{
"epoch": 0.39,
"learning_rate": 0.0007016707130066116,
"loss": 2.1758,
"step": 844
},
{
"epoch": 0.39,
"learning_rate": 0.0007009908896012055,
"loss": 2.1289,
"step": 845
},
{
"epoch": 0.39,
"learning_rate": 0.0007003106227349399,
"loss": 2.1211,
"step": 846
},
{
"epoch": 0.39,
"learning_rate": 0.000699629913908737,
"loss": 2.0469,
"step": 847
},
{
"epoch": 0.39,
"learning_rate": 0.0006989487646244943,
"loss": 2.1445,
"step": 848
},
{
"epoch": 0.39,
"learning_rate": 0.0006982671763850814,
"loss": 2.0176,
"step": 849
},
{
"epoch": 0.39,
"learning_rate": 0.0006975851506943359,
"loss": 2.1562,
"step": 850
},
{
"epoch": 0.39,
"learning_rate": 0.0006969026890570611,
"loss": 2.1484,
"step": 851
},
{
"epoch": 0.39,
"learning_rate": 0.0006962197929790216,
"loss": 2.043,
"step": 852
},
{
"epoch": 0.39,
"learning_rate": 0.0006955364639669409,
"loss": 2.1445,
"step": 853
},
{
"epoch": 0.39,
"learning_rate": 0.0006948527035284978,
"loss": 2.0508,
"step": 854
},
{
"epoch": 0.39,
"learning_rate": 0.0006941685131723225,
"loss": 2.1211,
"step": 855
},
{
"epoch": 0.39,
"learning_rate": 0.0006934838944079943,
"loss": 2.1211,
"step": 856
},
{
"epoch": 0.39,
"learning_rate": 0.0006927988487460378,
"loss": 2.125,
"step": 857
},
{
"epoch": 0.39,
"learning_rate": 0.0006921133776979186,
"loss": 2.0918,
"step": 858
},
{
"epoch": 0.39,
"learning_rate": 0.0006914274827760418,
"loss": 2.0781,
"step": 859
},
{
"epoch": 0.39,
"learning_rate": 0.0006907411654937475,
"loss": 1.998,
"step": 860
},
{
"epoch": 0.39,
"learning_rate": 0.0006900544273653075,
"loss": 2.1172,
"step": 861
},
{
"epoch": 0.4,
"learning_rate": 0.000689367269905922,
"loss": 1.9902,
"step": 862
},
{
"epoch": 0.4,
"learning_rate": 0.0006886796946317168,
"loss": 2.0039,
"step": 863
},
{
"epoch": 0.4,
"learning_rate": 0.0006879917030597397,
"loss": 2.0859,
"step": 864
},
{
"epoch": 0.4,
"learning_rate": 0.0006873032967079561,
"loss": 2.1719,
"step": 865
},
{
"epoch": 0.4,
"learning_rate": 0.0006866144770952474,
"loss": 2.0684,
"step": 866
},
{
"epoch": 0.4,
"learning_rate": 0.0006859252457414067,
"loss": 2.1562,
"step": 867
},
{
"epoch": 0.4,
"learning_rate": 0.0006852356041671351,
"loss": 2.1055,
"step": 868
},
{
"epoch": 0.4,
"learning_rate": 0.0006845455538940394,
"loss": 2.0859,
"step": 869
},
{
"epoch": 0.4,
"learning_rate": 0.0006838550964446276,
"loss": 2.125,
"step": 870
},
{
"epoch": 0.4,
"learning_rate": 0.0006831642333423067,
"loss": 2.0508,
"step": 871
},
{
"epoch": 0.4,
"learning_rate": 0.000682472966111378,
"loss": 2.0371,
"step": 872
},
{
"epoch": 0.4,
"learning_rate": 0.0006817812962770348,
"loss": 2.0703,
"step": 873
},
{
"epoch": 0.4,
"learning_rate": 0.0006810892253653589,
"loss": 2.1562,
"step": 874
},
{
"epoch": 0.4,
"learning_rate": 0.0006803967549033167,
"loss": 2.1484,
"step": 875
},
{
"epoch": 0.4,
"learning_rate": 0.0006797038864187564,
"loss": 2.0859,
"step": 876
},
{
"epoch": 0.4,
"learning_rate": 0.0006790106214404043,
"loss": 2.1719,
"step": 877
},
{
"epoch": 0.4,
"learning_rate": 0.0006783169614978614,
"loss": 2.0859,
"step": 878
},
{
"epoch": 0.4,
"learning_rate": 0.0006776229081216001,
"loss": 2.1016,
"step": 879
},
{
"epoch": 0.4,
"learning_rate": 0.0006769284628429611,
"loss": 2.1094,
"step": 880
},
{
"epoch": 0.4,
"learning_rate": 0.0006762336271941498,
"loss": 2.0781,
"step": 881
},
{
"epoch": 0.4,
"learning_rate": 0.0006755384027082326,
"loss": 2.0508,
"step": 882
},
{
"epoch": 0.4,
"learning_rate": 0.0006748427909191342,
"loss": 2.0312,
"step": 883
},
{
"epoch": 0.41,
"learning_rate": 0.0006741467933616335,
"loss": 2.1406,
"step": 884
},
|
|
{
"epoch": 0.41,
"learning_rate": 0.0006734504115713604,
"loss": 2.1055,
"step": 885
},
{
"epoch": 0.41,
"learning_rate": 0.0006727536470847932,
"loss": 2.1719,
"step": 886
},
{
"epoch": 0.41,
"learning_rate": 0.000672056501439254,
"loss": 2.0898,
"step": 887
},
{
"epoch": 0.41,
"learning_rate": 0.0006713589761729063,
"loss": 2.0859,
"step": 888
},
{
"epoch": 0.41,
"learning_rate": 0.0006706610728247508,
"loss": 2.0703,
"step": 889
},
{
"epoch": 0.41,
"learning_rate": 0.0006699627929346227,
"loss": 2.1562,
"step": 890
},
{
"epoch": 0.41,
"learning_rate": 0.0006692641380431879,
"loss": 2.123,
"step": 891
},
{
"epoch": 0.41,
"learning_rate": 0.0006685651096919393,
"loss": 2.0879,
"step": 892
},
{
"epoch": 0.41,
"learning_rate": 0.0006678657094231944,
"loss": 2.1523,
"step": 893
},
{
"epoch": 0.41,
"learning_rate": 0.0006671659387800909,
"loss": 2.0898,
"step": 894
},
{
"epoch": 0.41,
"learning_rate": 0.000666465799306584,
"loss": 2.1523,
"step": 895
},
{
"epoch": 0.41,
"learning_rate": 0.0006657652925474423,
"loss": 2.1484,
"step": 896
},
{
"epoch": 0.41,
"learning_rate": 0.000665064420048245,
"loss": 2.0762,
"step": 897
},
{
"epoch": 0.41,
"learning_rate": 0.0006643631833553785,
"loss": 2.125,
"step": 898
},
{
"epoch": 0.41,
"learning_rate": 0.000663661584016032,
"loss": 2.1719,
"step": 899
},
{
"epoch": 0.41,
"learning_rate": 0.0006629596235781957,
"loss": 2.1992,
"step": 900
},
{
"epoch": 0.41,
"learning_rate": 0.0006622573035906556,
"loss": 2.041,
"step": 901
},
{
"epoch": 0.41,
"learning_rate": 0.0006615546256029921,
"loss": 2.0527,
"step": 902
},
{
"epoch": 0.41,
"learning_rate": 0.0006608515911655743,
"loss": 2.0781,
"step": 903
},
{
"epoch": 0.41,
"learning_rate": 0.0006601482018295591,
"loss": 2.1367,
"step": 904
},
{
"epoch": 0.41,
"learning_rate": 0.0006594444591468851,
"loss": 2.0156,
"step": 905
},
{
"epoch": 0.42,
"learning_rate": 0.0006587403646702713,
"loss": 2.0117,
"step": 906
},
{
"epoch": 0.42,
"learning_rate": 0.0006580359199532126,
"loss": 2.125,
"step": 907
},
{
"epoch": 0.42,
"learning_rate": 0.000657331126549977,
"loss": 2.1367,
"step": 908
},
{
"epoch": 0.42,
"learning_rate": 0.0006566259860156014,
"loss": 2.1211,
"step": 909
},
{
"epoch": 0.42,
"learning_rate": 0.0006559204999058888,
"loss": 2.1133,
"step": 910
},
{
"epoch": 0.42,
"learning_rate": 0.0006552146697774049,
"loss": 2.1523,
"step": 911
},
{
"epoch": 0.42,
"learning_rate": 0.0006545084971874737,
"loss": 1.9941,
"step": 912
},
{
"epoch": 0.42,
"learning_rate": 0.0006538019836941758,
"loss": 2.0273,
"step": 913
},
{
"epoch": 0.42,
"learning_rate": 0.0006530951308563431,
"loss": 2.1094,
"step": 914
},
{
"epoch": 0.42,
"learning_rate": 0.0006523879402335567,
"loss": 2.125,
"step": 915
},
{
"epoch": 0.42,
"learning_rate": 0.0006516804133861429,
"loss": 2.0977,
"step": 916
},
{
"epoch": 0.42,
"learning_rate": 0.0006509725518751698,
"loss": 2.1797,
"step": 917
},
{
"epoch": 0.42,
"learning_rate": 0.0006502643572624438,
"loss": 2.2227,
"step": 918
},
{
"epoch": 0.42,
"learning_rate": 0.0006495558311105064,
"loss": 2.0527,
"step": 919
},
{
"epoch": 0.42,
"learning_rate": 0.0006488469749826305,
"loss": 2.1406,
"step": 920
},
{
"epoch": 0.42,
"learning_rate": 0.000648137790442817,
"loss": 2.1445,
"step": 921
},
{
"epoch": 0.42,
"learning_rate": 0.0006474282790557916,
"loss": 2.082,
"step": 922
},
{
"epoch": 0.42,
"learning_rate": 0.000646718442387001,
"loss": 2.1289,
"step": 923
},
{
"epoch": 0.42,
"learning_rate": 0.0006460082820026094,
"loss": 2.1211,
"step": 924
},
{
"epoch": 0.42,
"learning_rate": 0.0006452977994694959,
"loss": 2.0898,
"step": 925
},
{
"epoch": 0.42,
"learning_rate": 0.0006445869963552496,
"loss": 2.1875,
"step": 926
},
{
"epoch": 0.43,
"learning_rate": 0.0006438758742281672,
"loss": 2.0918,
"step": 927
},
{
"epoch": 0.43,
"learning_rate": 0.0006431644346572495,
"loss": 2.1016,
"step": 928
},
{
"epoch": 0.43,
"learning_rate": 0.0006424526792121974,
"loss": 2.1289,
"step": 929
},
{
"epoch": 0.43,
"learning_rate": 0.0006417406094634089,
"loss": 2.0449,
"step": 930
},
{
"epoch": 0.43,
"learning_rate": 0.0006410282269819756,
"loss": 1.9824,
"step": 931
},
{
"epoch": 0.43,
"learning_rate": 0.0006403155333396787,
"loss": 2.1309,
"step": 932
},
{
"epoch": 0.43,
"learning_rate": 0.0006396025301089863,
"loss": 2.0781,
"step": 933
},
{
"epoch": 0.43,
"learning_rate": 0.0006388892188630493,
"loss": 2.0762,
"step": 934
},
{
"epoch": 0.43,
"learning_rate": 0.0006381756011756982,
"loss": 2.1953,
"step": 935
},
{
"epoch": 0.43,
"learning_rate": 0.0006374616786214403,
"loss": 2.1328,
"step": 936
},
{
"epoch": 0.43,
"learning_rate": 0.0006367474527754544,
"loss": 2.0781,
"step": 937
},
{
"epoch": 0.43,
"learning_rate": 0.0006360329252135894,
"loss": 2.0176,
"step": 938
},
{
"epoch": 0.43,
"learning_rate": 0.0006353180975123595,
"loss": 2.0508,
"step": 939
},
{
"epoch": 0.43,
"learning_rate": 0.0006346029712489413,
"loss": 2.1055,
"step": 940
},
{
"epoch": 0.43,
"learning_rate": 0.0006338875480011698,
"loss": 2.0625,
"step": 941
},
{
"epoch": 0.43,
"learning_rate": 0.0006331718293475357,
"loss": 2.1328,
"step": 942
},
{
"epoch": 0.43,
"learning_rate": 0.0006324558168671811,
"loss": 2.0176,
"step": 943
},
{
"epoch": 0.43,
"learning_rate": 0.0006317395121398968,
"loss": 2.1641,
"step": 944
},
{
"epoch": 0.43,
"learning_rate": 0.0006310229167461179,
"loss": 2.043,
"step": 945
},
{
"epoch": 0.43,
"learning_rate": 0.0006303060322669214,
"loss": 2.0859,
"step": 946
},
{
"epoch": 0.43,
"learning_rate": 0.0006295888602840214,
"loss": 2.0664,
"step": 947
},
{
"epoch": 0.43,
"learning_rate": 0.0006288714023797671,
"loss": 2.0312,
"step": 948
},
{
"epoch": 0.44,
"learning_rate": 0.000628153660137138,
"loss": 2.082,
"step": 949
},
{
"epoch": 0.44,
"learning_rate": 0.0006274356351397413,
"loss": 2.1172,
"step": 950
},
{
"epoch": 0.44,
"learning_rate": 0.0006267173289718079,
"loss": 2.1094,
"step": 951
},
{
"epoch": 0.44,
"learning_rate": 0.000625998743218189,
"loss": 2.168,
"step": 952
},
{
"epoch": 0.44,
"learning_rate": 0.000625279879464353,
"loss": 2.0898,
"step": 953
},
{
"epoch": 0.44,
"learning_rate": 0.000624560739296381,
"loss": 2.0781,
"step": 954
},
{
"epoch": 0.44,
"learning_rate": 0.0006238413243009648,
"loss": 2.041,
"step": 955
},
{
"epoch": 0.44,
"learning_rate": 0.000623121636065402,
"loss": 2.1602,
"step": 956
},
{
"epoch": 0.44,
"learning_rate": 0.0006224016761775933,
"loss": 2.1035,
"step": 957
},
{
"epoch": 0.44,
"learning_rate": 0.0006216814462260386,
"loss": 2.1719,
"step": 958
},
{
"epoch": 0.44,
"learning_rate": 0.0006209609477998338,
"loss": 2.0938,
"step": 959
},
{
"epoch": 0.44,
"learning_rate": 0.0006202401824886674,
"loss": 2.1602,
"step": 960
},
{
"epoch": 0.44,
"learning_rate": 0.0006195191518828162,
"loss": 2.0742,
"step": 961
},
{
"epoch": 0.44,
"learning_rate": 0.0006187978575731427,
"loss": 2.1562,
"step": 962
},
{
"epoch": 0.44,
"learning_rate": 0.0006180763011510911,
"loss": 2.1914,
"step": 963
},
{
"epoch": 0.44,
"learning_rate": 0.000617354484208684,
"loss": 1.9629,
"step": 964
},
{
"epoch": 0.44,
"learning_rate": 0.0006166324083385189,
"loss": 2.1641,
"step": 965
},
{
"epoch": 0.44,
"learning_rate": 0.0006159100751337642,
"loss": 2.1055,
"step": 966
},
{
"epoch": 0.44,
"learning_rate": 0.0006151874861881565,
"loss": 2.1211,
"step": 967
},
|
|
{
"epoch": 0.44,
"learning_rate": 0.0006144646430959964,
"loss": 2.1797,
"step": 968
},
{
"epoch": 0.44,
"learning_rate": 0.0006137415474521454,
"loss": 2.1133,
"step": 969
},
{
"epoch": 0.44,
"learning_rate": 0.0006130182008520222,
"loss": 2.1777,
"step": 970
},
{
"epoch": 0.45,
"learning_rate": 0.000612294604891599,
"loss": 2.0859,
"step": 971
},
{
"epoch": 0.45,
"learning_rate": 0.0006115707611673986,
"loss": 2.0625,
"step": 972
},
{
"epoch": 0.45,
"learning_rate": 0.0006108466712764902,
"loss": 2.084,
"step": 973
},
{
"epoch": 0.45,
"learning_rate": 0.0006101223368164858,
"loss": 2.1211,
"step": 974
},
{
"epoch": 0.45,
"learning_rate": 0.0006093977593855375,
"loss": 2.0898,
"step": 975
},
{
"epoch": 0.45,
"learning_rate": 0.0006086729405823335,
"loss": 2.0625,
"step": 976
},
{
"epoch": 0.45,
"learning_rate": 0.0006079478820060943,
"loss": 2.082,
"step": 977
},
{
"epoch": 0.45,
"learning_rate": 0.0006072225852565695,
"loss": 2.084,
"step": 978
},
{
"epoch": 0.45,
"learning_rate": 0.0006064970519340341,
"loss": 2.0195,
"step": 979
},
{
"epoch": 0.45,
"learning_rate": 0.0006057712836392856,
"loss": 2.0391,
"step": 980
},
{
"epoch": 0.45,
"learning_rate": 0.0006050452819736389,
"loss": 2.1289,
"step": 981
},
{
"epoch": 0.45,
"learning_rate": 0.000604319048538925,
"loss": 2.0488,
"step": 982
},
{
"epoch": 0.45,
"learning_rate": 0.0006035925849374855,
"loss": 2.168,
"step": 983
},
{
"epoch": 0.45,
"learning_rate": 0.0006028658927721697,
"loss": 2.1484,
"step": 984
},
{
"epoch": 0.45,
"learning_rate": 0.0006021389736463321,
"loss": 2.0332,
"step": 985
},
{
"epoch": 0.45,
"learning_rate": 0.0006014118291638271,
"loss": 2.0195,
"step": 986
},
{
"epoch": 0.45,
"learning_rate": 0.0006006844609290065,
"loss": 2.1191,
"step": 987
},
{
"epoch": 0.45,
"learning_rate": 0.0005999568705467161,
"loss": 2.0781,
"step": 988
},
{
"epoch": 0.45,
"learning_rate": 0.0005992290596222915,
"loss": 2.1914,
"step": 989
},
{
"epoch": 0.45,
"learning_rate": 0.0005985010297615551,
"loss": 2.1719,
"step": 990
},
{
"epoch": 0.45,
"learning_rate": 0.0005977727825708123,
"loss": 2.082,
"step": 991
},
{
"epoch": 0.45,
"learning_rate": 0.0005970443196568478,
"loss": 2.1211,
"step": 992
},
{
"epoch": 0.46,
"learning_rate": 0.0005963156426269227,
"loss": 2.0781,
"step": 993
},
{
"epoch": 0.46,
"learning_rate": 0.0005955867530887702,
"loss": 2.0547,
"step": 994
},
{
"epoch": 0.46,
"learning_rate": 0.0005948576526505923,
"loss": 2.0859,
"step": 995
},
{
"epoch": 0.46,
"learning_rate": 0.0005941283429210568,
"loss": 2.0879,
"step": 996
},
{
"epoch": 0.46,
"learning_rate": 0.0005933988255092926,
"loss": 2.1055,
"step": 997
},
{
"epoch": 0.46,
"learning_rate": 0.0005926691020248874,
"loss": 1.9883,
"step": 998
},
{
"epoch": 0.46,
"learning_rate": 0.0005919391740778833,
"loss": 2.0859,
"step": 999
},
{
"epoch": 0.46,
"learning_rate": 0.0005912090432787736,
"loss": 2.082,
"step": 1000
},
{
"epoch": 0.46,
"learning_rate": 0.000590478711238499,
"loss": 2.1328,
"step": 1001
},
{
"epoch": 0.46,
"learning_rate": 0.0005897481795684446,
"loss": 2.1016,
"step": 1002
},
{
"epoch": 0.46,
"learning_rate": 0.0005890174498804355,
"loss": 2.1641,
"step": 1003
},
{
"epoch": 0.46,
"learning_rate": 0.0005882865237867339,
"loss": 2.0605,
"step": 1004
},
{
"epoch": 0.46,
"learning_rate": 0.0005875554029000353,
"loss": 2.084,
"step": 1005
},
{
"epoch": 0.46,
"learning_rate": 0.0005868240888334653,
"loss": 2.1445,
"step": 1006
},
{
"epoch": 0.46,
"learning_rate": 0.0005860925832005753,
"loss": 2.0664,
"step": 1007
},
{
"epoch": 0.46,
"learning_rate": 0.0005853608876153395,
"loss": 2.043,
"step": 1008
},
{
"epoch": 0.46,
"learning_rate": 0.0005846290036921512,
"loss": 2.1035,
"step": 1009
},
{
"epoch": 0.46,
"learning_rate": 0.0005838969330458195,
"loss": 2.1797,
"step": 1010
},
{
"epoch": 0.46,
"learning_rate": 0.0005831646772915651,
"loss": 2.0977,
"step": 1011
},
{
"epoch": 0.46,
"learning_rate": 0.0005824322380450173,
"loss": 2.1758,
"step": 1012
},
{
"epoch": 0.46,
"learning_rate": 0.0005816996169222102,
"loss": 2.123,
"step": 1013
},
{
"epoch": 0.46,
"learning_rate": 0.0005809668155395793,
"loss": 2.0938,
"step": 1014
},
{
"epoch": 0.47,
"learning_rate": 0.0005802338355139578,
"loss": 2.0645,
"step": 1015
},
{
"epoch": 0.47,
"learning_rate": 0.0005795006784625728,
"loss": 2.0977,
"step": 1016
},
{
"epoch": 0.47,
"learning_rate": 0.0005787673460030423,
"loss": 2.0742,
"step": 1017
},
{
"epoch": 0.47,
"learning_rate": 0.000578033839753371,
"loss": 2.082,
"step": 1018
},
{
"epoch": 0.47,
"learning_rate": 0.0005773001613319476,
"loss": 2.1211,
"step": 1019
},
{
"epoch": 0.47,
"learning_rate": 0.00057656631235754,
"loss": 2.0762,
"step": 1020
},
{
"epoch": 0.47,
"learning_rate": 0.0005758322944492929,
"loss": 2.1523,
"step": 1021
},
{
"epoch": 0.47,
"learning_rate": 0.0005750981092267237,
"loss": 2.0996,
"step": 1022
},
{
"epoch": 0.47,
"learning_rate": 0.0005743637583097183,
"loss": 2.125,
"step": 1023
},
{
"epoch": 0.47,
"learning_rate": 0.0005736292433185291,
"loss": 2.0332,
"step": 1024
},
{
"epoch": 0.47,
"learning_rate": 0.0005728945658737699,
"loss": 2.0977,
"step": 1025
},
{
"epoch": 0.47,
"learning_rate": 0.0005721597275964133,
"loss": 2.1641,
"step": 1026
},
{
"epoch": 0.47,
"learning_rate": 0.0005714247301077865,
"loss": 2.0957,
"step": 1027
},
{
"epoch": 0.47,
"learning_rate": 0.0005706895750295682,
"loss": 2.1191,
"step": 1028
},
{
"epoch": 0.47,
"learning_rate": 0.0005699542639837844,
"loss": 2.0312,
"step": 1029
},
{
"epoch": 0.47,
"learning_rate": 0.0005692187985928055,
"loss": 2.1836,
"step": 1030
},
{
"epoch": 0.47,
"learning_rate": 0.0005684831804793427,
"loss": 2.0469,
"step": 1031
},
{
"epoch": 0.47,
"learning_rate": 0.0005677474112664438,
"loss": 2.0352,
"step": 1032
},
{
"epoch": 0.47,
"learning_rate": 0.0005670114925774899,
"loss": 2.043,
"step": 1033
},
{
"epoch": 0.47,
"learning_rate": 0.0005662754260361924,
"loss": 2.1367,
"step": 1034
},
{
"epoch": 0.47,
"learning_rate": 0.0005655392132665884,
"loss": 2.1016,
"step": 1035
},
{
"epoch": 0.48,
"learning_rate": 0.000564802855893038,
"loss": 2.0723,
"step": 1036
},
{
"epoch": 0.48,
"learning_rate": 0.0005640663555402198,
"loss": 2.125,
"step": 1037
},
{
"epoch": 0.48,
"learning_rate": 0.0005633297138331285,
"loss": 2.0703,
"step": 1038
},
{
"epoch": 0.48,
"learning_rate": 0.0005625929323970705,
"loss": 2.1211,
"step": 1039
},
{
"epoch": 0.48,
"learning_rate": 0.0005618560128576603,
"loss": 2.0254,
"step": 1040
},
{
"epoch": 0.48,
"learning_rate": 0.0005611189568408173,
"loss": 2.1309,
"step": 1041
},
{
"epoch": 0.48,
"learning_rate": 0.0005603817659727619,
"loss": 2.1094,
"step": 1042
},
{
"epoch": 0.48,
"learning_rate": 0.0005596444418800121,
"loss": 2.1719,
"step": 1043
},
{
"epoch": 0.48,
"learning_rate": 0.0005589069861893798,
"loss": 2.0625,
"step": 1044
},
{
"epoch": 0.48,
"learning_rate": 0.0005581694005279673,
"loss": 2.1055,
"step": 1045
},
{
"epoch": 0.48,
"learning_rate": 0.0005574316865231637,
"loss": 2.0859,
"step": 1046
},
{
"epoch": 0.48,
"learning_rate": 0.0005566938458026411,
"loss": 2.0566,
"step": 1047
},
{
"epoch": 0.48,
"learning_rate": 0.0005559558799943514,
"loss": 2.125,
"step": 1048
},
{
"epoch": 0.48,
"learning_rate": 0.0005552177907265223,
"loss": 2.0195,
"step": 1049
},
{
"epoch": 0.48,
|
|
"learning_rate": 0.000554479579627654,
|
|
"loss": 2.1445,
|
|
"step": 1050
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"learning_rate": 0.0005537412483265157,
|
|
"loss": 2.1133,
|
|
"step": 1051
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"learning_rate": 0.0005530027984521413,
|
|
"loss": 2.0898,
|
|
"step": 1052
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"learning_rate": 0.0005522642316338268,
|
|
"loss": 2.0801,
|
|
"step": 1053
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"learning_rate": 0.0005515255495011259,
|
|
"loss": 2.1172,
|
|
"step": 1054
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"learning_rate": 0.0005507867536838472,
|
|
"loss": 2.0898,
|
|
"step": 1055
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"learning_rate": 0.0005500478458120492,
|
|
"loss": 2.0859,
|
|
"step": 1056
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"learning_rate": 0.0005493088275160387,
|
|
"loss": 2.0938,
|
|
"step": 1057
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005485697004263657,
|
|
"loss": 2.1074,
|
|
"step": 1058
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005478304661738199,
|
|
"loss": 2.1387,
|
|
"step": 1059
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005470911263894279,
|
|
"loss": 2.0625,
|
|
"step": 1060
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005463516827044491,
|
|
"loss": 2.1055,
|
|
"step": 1061
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.000545612136750372,
|
|
"loss": 2.1211,
|
|
"step": 1062
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005448724901589107,
|
|
"loss": 2.0078,
|
|
"step": 1063
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005441327445620014,
|
|
"loss": 2.0801,
|
|
"step": 1064
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005433929015917988,
|
|
"loss": 2.1406,
|
|
"step": 1065
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005426529628806724,
|
|
"loss": 2.0781,
|
|
"step": 1066
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005419129300612029,
|
|
"loss": 2.0645,
|
|
"step": 1067
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.000541172804766179,
|
|
"loss": 2.1562,
|
|
"step": 1068
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005404325886285927,
|
|
"loss": 2.082,
|
|
"step": 1069
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.000539692283281637,
|
|
"loss": 2.0371,
|
|
"step": 1070
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005389518903587017,
|
|
"loss": 2.1289,
|
|
"step": 1071
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005382114114933695,
|
|
"loss": 2.0547,
|
|
"step": 1072
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005374708483194132,
|
|
"loss": 2.0371,
|
|
"step": 1073
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.000536730202470791,
|
|
"loss": 2.0723,
|
|
"step": 1074
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005359894755816443,
|
|
"loss": 2.0977,
|
|
"step": 1075
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005352486692862926,
|
|
"loss": 2.1172,
|
|
"step": 1076
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005345077852192307,
|
|
"loss": 2.1445,
|
|
"step": 1077
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005337668250151254,
|
|
"loss": 2.0117,
|
|
"step": 1078
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0005330257903088111,
|
|
"loss": 2.0938,
|
|
"step": 1079
|
|
},
|
|
{
"epoch": 0.5,
"learning_rate": 0.000532284682735287,
"loss": 2.0664,
"step": 1080
},
{
"epoch": 0.5,
"learning_rate": 0.0005315435039297124,
"loss": 2.0918,
"step": 1081
},
{
"epoch": 0.5,
"learning_rate": 0.0005308022555274046,
"loss": 2.0176,
"step": 1082
},
{
"epoch": 0.5,
"learning_rate": 0.0005300609391638336,
"loss": 2.0918,
"step": 1083
},
{
"epoch": 0.5,
"learning_rate": 0.0005293195564746201,
"loss": 2.0977,
"step": 1084
},
{
"epoch": 0.5,
"learning_rate": 0.0005285781090955304,
"loss": 2.0742,
"step": 1085
},
{
"epoch": 0.5,
"learning_rate": 0.0005278365986624743,
"loss": 2.1406,
"step": 1086
},
{
"epoch": 0.5,
"learning_rate": 0.0005270950268115001,
"loss": 2.0566,
"step": 1087
},
{
"epoch": 0.5,
"learning_rate": 0.0005263533951787919,
"loss": 2.0527,
"step": 1088
},
{
"epoch": 0.5,
"learning_rate": 0.000525611705400666,
"loss": 2.0449,
"step": 1089
},
{
"epoch": 0.5,
"learning_rate": 0.0005248699591135664,
"loss": 2.1328,
"step": 1090
},
{
"epoch": 0.5,
"learning_rate": 0.0005241281579540618,
"loss": 2.0781,
"step": 1091
},
{
"epoch": 0.5,
"learning_rate": 0.0005233863035588427,
"loss": 2.0176,
"step": 1092
},
{
"epoch": 0.5,
"learning_rate": 0.0005226443975647161,
"loss": 2.0312,
"step": 1093
},
{
"epoch": 0.5,
"learning_rate": 0.0005219024416086036,
"loss": 2.0273,
"step": 1094
},
{
"epoch": 0.5,
"learning_rate": 0.0005211604373275366,
"loss": 2.0254,
"step": 1095
},
{
"epoch": 0.5,
"learning_rate": 0.0005204183863586533,
"loss": 2.1055,
"step": 1096
},
{
"epoch": 0.5,
"learning_rate": 0.0005196762903391951,
"loss": 2.1836,
"step": 1097
},
{
"epoch": 0.5,
"learning_rate": 0.0005189341509065023,
"loss": 2.0801,
"step": 1098
},
{
"epoch": 0.5,
"learning_rate": 0.0005181919696980112,
"loss": 2.0605,
"step": 1099
},
{
"epoch": 0.5,
"learning_rate": 0.0005174497483512506,
"loss": 2.0137,
"step": 1100
},
{
"epoch": 0.5,
"learning_rate": 0.0005167074885038374,
"loss": 2.1016,
"step": 1101
},
{
"epoch": 0.51,
"learning_rate": 0.0005159651917934735,
"loss": 2.0352,
"step": 1102
},
{
"epoch": 0.51,
"learning_rate": 0.0005152228598579428,
"loss": 2.0703,
"step": 1103
},
{
"epoch": 0.51,
"learning_rate": 0.000514480494335106,
"loss": 2.0781,
"step": 1104
},
{
"epoch": 0.51,
"learning_rate": 0.0005137380968628983,
"loss": 2.0117,
"step": 1105
},
{
"epoch": 0.51,
"learning_rate": 0.0005129956690793255,
"loss": 2.0781,
"step": 1106
},
{
"epoch": 0.51,
"learning_rate": 0.0005122532126224601,
"loss": 2.0781,
"step": 1107
},
{
"epoch": 0.51,
"learning_rate": 0.0005115107291304378,
"loss": 2.0332,
"step": 1108
},
{
"epoch": 0.51,
"learning_rate": 0.0005107682202414544,
"loss": 2.0566,
"step": 1109
},
{
"epoch": 0.51,
"learning_rate": 0.0005100256875937613,
"loss": 2.0156,
"step": 1110
},
{
"epoch": 0.51,
"learning_rate": 0.0005092831328256625,
"loss": 2.0879,
"step": 1111
},
{
"epoch": 0.51,
"learning_rate": 0.0005085405575755105,
"loss": 2.0527,
"step": 1112
},
{
"epoch": 0.51,
"learning_rate": 0.0005077979634817034,
"loss": 2.1289,
"step": 1113
},
{
"epoch": 0.51,
"learning_rate": 0.0005070553521826808,
"loss": 2.1172,
"step": 1114
},
{
"epoch": 0.51,
"learning_rate": 0.00050631272531692,
"loss": 2.0586,
"step": 1115
},
{
"epoch": 0.51,
"learning_rate": 0.0005055700845229327,
"loss": 2.0059,
"step": 1116
},
{
"epoch": 0.51,
"learning_rate": 0.000504827431439262,
"loss": 2.0664,
"step": 1117
},
{
"epoch": 0.51,
"learning_rate": 0.000504084767704477,
"loss": 2.041,
"step": 1118
},
{
"epoch": 0.51,
"learning_rate": 0.0005033420949571712,
"loss": 2.082,
"step": 1119
},
{
"epoch": 0.51,
"learning_rate": 0.0005025994148359574,
"loss": 2.125,
"step": 1120
},
{
"epoch": 0.51,
"learning_rate": 0.0005018567289794651,
"loss": 2.0723,
"step": 1121
},
{
"epoch": 0.51,
"learning_rate": 0.0005011140390263362,
"loss": 2.0527,
"step": 1122
},
{
"epoch": 0.51,
"learning_rate": 0.0005003713466152218,
"loss": 2.0859,
"step": 1123
},
{
"epoch": 0.52,
"learning_rate": 0.0004996286533847783,
"loss": 2.0977,
"step": 1124
},
{
"epoch": 0.52,
"learning_rate": 0.000498885960973664,
"loss": 2.043,
"step": 1125
},
{
"epoch": 0.52,
"learning_rate": 0.000498143271020535,
"loss": 2.1055,
"step": 1126
},
{
"epoch": 0.52,
"learning_rate": 0.0004974005851640428,
"loss": 2.0625,
"step": 1127
},
{
"epoch": 0.52,
"learning_rate": 0.000496657905042829,
"loss": 2.0195,
"step": 1128
},
{
"epoch": 0.52,
"learning_rate": 0.0004959152322955232,
"loss": 2.0957,
"step": 1129
},
{
"epoch": 0.52,
"learning_rate": 0.0004951725685607382,
"loss": 2.0742,
"step": 1130
},
{
"epoch": 0.52,
"learning_rate": 0.0004944299154770673,
"loss": 2.1055,
"step": 1131
},
{
"epoch": 0.52,
"learning_rate": 0.0004936872746830802,
"loss": 2.1406,
"step": 1132
},
{
"epoch": 0.52,
"learning_rate": 0.0004929446478173195,
"loss": 2.0527,
"step": 1133
},
{
"epoch": 0.52,
"learning_rate": 0.0004922020365182968,
"loss": 2.043,
"step": 1134
},
{
"epoch": 0.52,
"learning_rate": 0.0004914594424244897,
"loss": 2.1328,
"step": 1135
},
{
"epoch": 0.52,
"learning_rate": 0.0004907168671743376,
"loss": 2.1094,
"step": 1136
},
{
"epoch": 0.52,
"learning_rate": 0.0004899743124062387,
"loss": 2.0898,
"step": 1137
},
{
"epoch": 0.52,
"learning_rate": 0.0004892317797585456,
"loss": 2.1133,
"step": 1138
},
{
"epoch": 0.52,
"learning_rate": 0.0004884892708695623,
"loss": 2.0664,
"step": 1139
},
{
"epoch": 0.52,
"learning_rate": 0.0004877467873775402,
"loss": 2.1289,
"step": 1140
},
{
"epoch": 0.52,
"learning_rate": 0.00048700433092067473,
"loss": 1.9902,
"step": 1141
},
{
"epoch": 0.52,
"learning_rate": 0.0004862619031371019,
"loss": 2.0645,
"step": 1142
},
{
"epoch": 0.52,
"learning_rate": 0.0004855195056648942,
"loss": 2.1016,
"step": 1143
},
{
"epoch": 0.52,
"learning_rate": 0.00048477714014205734,
"loss": 2.0625,
"step": 1144
},
{
"epoch": 0.52,
"learning_rate": 0.00048403480820652644,
"loss": 2.0723,
"step": 1145
},
{
"epoch": 0.53,
"learning_rate": 0.0004832925114961629,
"loss": 2.082,
"step": 1146
},
{
"epoch": 0.53,
"learning_rate": 0.0004825502516487497,
"loss": 2.0664,
"step": 1147
},
{
"epoch": 0.53,
"learning_rate": 0.00048180803030198896,
"loss": 2.1211,
"step": 1148
},
{
"epoch": 0.53,
"learning_rate": 0.0004810658490934979,
"loss": 2.043,
"step": 1149
},
{
"epoch": 0.53,
"learning_rate": 0.000480323709660805,
"loss": 2.0605,
"step": 1150
},
{
"epoch": 0.53,
"learning_rate": 0.0004795816136413467,
"loss": 2.0742,
"step": 1151
},
{
"epoch": 0.53,
"learning_rate": 0.00047883956267246353,
"loss": 2.1445,
"step": 1152
},
{
"epoch": 0.53,
"learning_rate": 0.00047809755839139657,
"loss": 2.1406,
"step": 1153
},
{
"epoch": 0.53,
"learning_rate": 0.0004773556024352841,
"loss": 2.1133,
"step": 1154
},
{
"epoch": 0.53,
"learning_rate": 0.00047661369644115754,
"loss": 2.125,
"step": 1155
},
{
"epoch": 0.53,
"learning_rate": 0.0004758718420459383,
"loss": 2.0742,
"step": 1156
},
{
"epoch": 0.53,
"learning_rate": 0.0004751300408864339,
"loss": 2.0352,
"step": 1157
},
{
"epoch": 0.53,
"learning_rate": 0.00047438829459933414,
"loss": 2.0371,
"step": 1158
},
{
"epoch": 0.53,
"learning_rate": 0.0004736466048212082,
"loss": 2.0605,
"step": 1159
},
{
"epoch": 0.53,
"learning_rate": 0.0004729049731885002,
"loss": 2.0391,
"step": 1160
},
{
"epoch": 0.53,
"learning_rate": 0.000472163401337526,
"loss": 2.0215,
"step": 1161
},
{
"epoch": 0.53,
"learning_rate": 0.00047142189090446985,
"loss": 1.9824,
"step": 1162
},
{
"epoch": 0.53,
"learning_rate": 0.0004706804435253802,
"loss": 2.0547,
"step": 1163
},
{
"epoch": 0.53,
"learning_rate": 0.0004699390608361665,
"loss": 2.082,
"step": 1164
},
{
"epoch": 0.53,
"learning_rate": 0.0004691977444725955,
"loss": 2.0703,
"step": 1165
},
{
"epoch": 0.53,
"learning_rate": 0.0004684564960702877,
"loss": 2.0508,
"step": 1166
},
{
"epoch": 0.54,
"learning_rate": 0.0004677153172647131,
"loss": 2.0098,
"step": 1167
},
{
"epoch": 0.54,
"learning_rate": 0.00046697420969118894,
"loss": 2.1172,
"step": 1168
},
{
"epoch": 0.54,
"learning_rate": 0.00046623317498487466,
"loss": 2.0312,
"step": 1169
},
{
"epoch": 0.54,
"learning_rate": 0.0004654922147807694,
"loss": 2.1016,
"step": 1170
},
{
"epoch": 0.54,
"learning_rate": 0.00046475133071370757,
"loss": 2.1914,
"step": 1171
},
{
"epoch": 0.54,
"learning_rate": 0.00046401052441835574,
"loss": 1.9629,
"step": 1172
},
{
"epoch": 0.54,
"learning_rate": 0.000463269797529209,
"loss": 1.9805,
"step": 1173
},
{
"epoch": 0.54,
"learning_rate": 0.00046252915168058697,
"loss": 2.0723,
"step": 1174
},
{
"epoch": 0.54,
"learning_rate": 0.0004617885885066305,
"loss": 2.043,
"step": 1175
},
{
"epoch": 0.54,
"learning_rate": 0.0004610481096412984,
"loss": 2.0527,
"step": 1176
},
{
"epoch": 0.54,
"learning_rate": 0.000460307716718363,
"loss": 2.0586,
"step": 1177
},
{
"epoch": 0.54,
"learning_rate": 0.0004595674113714074,
"loss": 2.1074,
"step": 1178
},
{
"epoch": 0.54,
"learning_rate": 0.0004588271952338212,
"loss": 2.0625,
"step": 1179
},
{
"epoch": 0.54,
"learning_rate": 0.00045808706993879714,
"loss": 2.123,
"step": 1180
},
{
"epoch": 0.54,
"learning_rate": 0.00045734703711932767,
"loss": 2.0566,
"step": 1181
},
{
"epoch": 0.54,
"learning_rate": 0.0004566070984082013,
"loss": 2.0664,
"step": 1182
},
{
"epoch": 0.54,
"learning_rate": 0.00045586725543799865,
"loss": 2.0352,
"step": 1183
},
{
"epoch": 0.54,
"learning_rate": 0.00045512750984108937,
"loss": 2.0469,
"step": 1184
},
{
"epoch": 0.54,
"learning_rate": 0.000454387863249628,
"loss": 2.125,
"step": 1185
},
{
"epoch": 0.54,
"learning_rate": 0.00045364831729555096,
"loss": 2.0605,
"step": 1186
},
{
"epoch": 0.54,
"learning_rate": 0.0004529088736105721,
"loss": 2.0605,
"step": 1187
},
{
"epoch": 0.54,
"learning_rate": 0.0004521695338261802,
"loss": 2.1074,
"step": 1188
},
{
"epoch": 0.55,
"learning_rate": 0.0004514302995736344,
"loss": 2.0996,
"step": 1189
},
{
"epoch": 0.55,
"learning_rate": 0.0004506911724839613,
"loss": 2.0938,
"step": 1190
},
{
"epoch": 0.55,
"learning_rate": 0.0004499521541879508,
"loss": 2.1016,
"step": 1191
},
{
"epoch": 0.55,
"learning_rate": 0.00044921324631615303,
"loss": 2.0117,
"step": 1192
},
{
"epoch": 0.55,
"learning_rate": 0.0004484744504988742,
"loss": 2.1094,
"step": 1193
},
{
"epoch": 0.55,
"learning_rate": 0.00044773576836617336,
"loss": 2.0469,
"step": 1194
},
{
"epoch": 0.55,
"learning_rate": 0.0004469972015478588,
"loss": 2.0898,
"step": 1195
},
{
"epoch": 0.55,
"learning_rate": 0.0004462587516734844,
"loss": 2.1992,
"step": 1196
},
{
"epoch": 0.55,
"learning_rate": 0.00044552042037234596,
"loss": 2.0527,
"step": 1197
},
{
"epoch": 0.55,
"learning_rate": 0.00044478220927347774,
"loss": 2.082,
"step": 1198
},
{
"epoch": 0.55,
"learning_rate": 0.00044404412000564875,
"loss": 2.0547,
"step": 1199
},
{
"epoch": 0.55,
"learning_rate": 0.000443306154197359,
"loss": 2.0859,
"step": 1200
},
{
"epoch": 0.55,
"learning_rate": 0.00044256831347683646,
"loss": 2.1289,
"step": 1201
},
{
"epoch": 0.55,
"learning_rate": 0.0004418305994720328,
"loss": 2.0547,
"step": 1202
},
{
"epoch": 0.55,
"learning_rate": 0.0004410930138106203,
"loss": 2.0645,
"step": 1203
},
{
"epoch": 0.55,
"learning_rate": 0.000440355558119988,
"loss": 2.0645,
"step": 1204
},
{
"epoch": 0.55,
"learning_rate": 0.00043961823402723814,
"loss": 2.1211,
"step": 1205
},
{
"epoch": 0.55,
"learning_rate": 0.0004388810431591829,
"loss": 2.0449,
"step": 1206
},
{
"epoch": 0.55,
"learning_rate": 0.0004381439871423398,
"loss": 2.0371,
"step": 1207
},
{
"epoch": 0.55,
"learning_rate": 0.00043740706760292966,
"loss": 2.1562,
"step": 1208
},
{
"epoch": 0.55,
"learning_rate": 0.0004366702861668716,
"loss": 2.0703,
"step": 1209
},
{
"epoch": 0.55,
"learning_rate": 0.00043593364445978036,
"loss": 2.0371,
"step": 1210
},
{
"epoch": 0.56,
"learning_rate": 0.0004351971441069622,
"loss": 2.082,
"step": 1211
},
{
"epoch": 0.56,
"learning_rate": 0.0004344607867334116,
"loss": 2.1055,
"step": 1212
},
{
"epoch": 0.56,
"learning_rate": 0.00043372457396380766,
"loss": 2.0312,
"step": 1213
},
{
"epoch": 0.56,
"learning_rate": 0.00043298850742251013,
"loss": 2.0586,
"step": 1214
},
{
"epoch": 0.56,
"learning_rate": 0.0004322525887335563,
"loss": 2.1016,
"step": 1215
},
{
"epoch": 0.56,
"learning_rate": 0.00043151681952065734,
"loss": 2.123,
"step": 1216
},
{
"epoch": 0.56,
"learning_rate": 0.00043078120140719456,
"loss": 2.1172,
"step": 1217
},
{
"epoch": 0.56,
"learning_rate": 0.0004300457360162158,
"loss": 2.0508,
"step": 1218
},
{
"epoch": 0.56,
"learning_rate": 0.0004293104249704319,
"loss": 2.0664,
"step": 1219
},
{
"epoch": 0.56,
"learning_rate": 0.00042857526989221355,
"loss": 2.0625,
"step": 1220
},
{
"epoch": 0.56,
"learning_rate": 0.00042784027240358674,
"loss": 2.0215,
"step": 1221
},
{
"epoch": 0.56,
"learning_rate": 0.0004271054341262301,
"loss": 1.9688,
"step": 1222
},
{
"epoch": 0.56,
"learning_rate": 0.000426370756681471,
"loss": 2.1328,
"step": 1223
},
{
"epoch": 0.56,
"learning_rate": 0.0004256362416902817,
"loss": 2.0469,
"step": 1224
},
{
"epoch": 0.56,
"learning_rate": 0.00042490189077327637,
"loss": 2.0293,
"step": 1225
},
{
"epoch": 0.56,
"learning_rate": 0.00042416770555070703,
"loss": 1.9902,
"step": 1226
},
{
"epoch": 0.56,
"learning_rate": 0.00042343368764246,
"loss": 2.0977,
"step": 1227
},
{
"epoch": 0.56,
"learning_rate": 0.0004226998386680524,
"loss": 2.0879,
"step": 1228
},
{
"epoch": 0.56,
"learning_rate": 0.000421966160246629,
"loss": 2.1094,
"step": 1229
},
{
"epoch": 0.56,
"learning_rate": 0.00042123265399695783,
"loss": 2.0449,
"step": 1230
},
{
"epoch": 0.56,
"learning_rate": 0.0004204993215374273,
"loss": 2.0273,
"step": 1231
},
{
"epoch": 0.56,
"learning_rate": 0.00041976616448604226,
"loss": 2.1328,
"step": 1232
},
{
"epoch": 0.57,
"learning_rate": 0.00041903318446042076,
"loss": 2.1152,
"step": 1233
},
{
"epoch": 0.57,
"learning_rate": 0.00041830038307778984,
"loss": 2.1211,
"step": 1234
},
{
"epoch": 0.57,
"learning_rate": 0.0004175677619549828,
"loss": 2.0703,
"step": 1235
},
{
"epoch": 0.57,
"learning_rate": 0.000416835322708435,
"loss": 2.0898,
"step": 1236
},
{
"epoch": 0.57,
"learning_rate": 0.00041610306695418056,
"loss": 2.082,
"step": 1237
},
{
"epoch": 0.57,
"learning_rate": 0.0004153709963078488,
"loss": 2.1016,
"step": 1238
},
{
"epoch": 0.57,
"learning_rate": 0.0004146391123846606,
"loss": 2.0059,
"step": 1239
},
{
"epoch": 0.57,
"learning_rate": 0.0004139074167994249,
"loss": 2.1816,
"step": 1240
},
{
"epoch": 0.57,
"learning_rate": 0.00041317591116653486,
"loss": 2.0684,
"step": 1241
},
{
"epoch": 0.57,
"learning_rate": 0.0004124445970999648,
"loss": 2.0352,
"step": 1242
},
{
"epoch": 0.57,
"learning_rate": 0.00041171347621326627,
"loss": 2.1211,
"step": 1243
},
{
"epoch": 0.57,
"learning_rate": 0.00041098255011956465,
"loss": 2.1289,
"step": 1244
},
{
"epoch": 0.57,
"learning_rate": 0.00041025182043155547,
"loss": 2.0586,
"step": 1245
},
{
"epoch": 0.57,
"learning_rate": 0.000409521288761501,
"loss": 1.9727,
"step": 1246
},
{
"epoch": 0.57,
"learning_rate": 0.00040879095672122646,
"loss": 2.1348,
"step": 1247
},
{
"epoch": 0.57,
"learning_rate": 0.0004080608259221167,
"loss": 2.1094,
"step": 1248
},
{
"epoch": 0.57,
"learning_rate": 0.0004073308979751126,
"loss": 2.0703,
"step": 1249
},
{
"epoch": 0.57,
"learning_rate": 0.0004066011744907074,
"loss": 2.0977,
"step": 1250
},
{
"epoch": 0.57,
"learning_rate": 0.00040587165707894326,
"loss": 2.1406,
"step": 1251
},
{
"epoch": 0.57,
"learning_rate": 0.0004051423473494076,
"loss": 1.9941,
"step": 1252
},
{
"epoch": 0.57,
"learning_rate": 0.0004044132469112299,
"loss": 2.0059,
"step": 1253
},
{
"epoch": 0.57,
"learning_rate": 0.00040368435737307733,
"loss": 1.9844,
"step": 1254
},
{
"epoch": 0.58,
"learning_rate": 0.00040295568034315224,
"loss": 2.127,
"step": 1255
},
{
"epoch": 0.58,
"learning_rate": 0.0004022272174291878,
"loss": 2.0898,
"step": 1256
},
{
"epoch": 0.58,
"learning_rate": 0.0004014989702384449,
"loss": 2.0859,
"step": 1257
},
{
"epoch": 0.58,
"learning_rate": 0.00040077094037770843,
"loss": 2.0137,
"step": 1258
},
{
"epoch": 0.58,
"learning_rate": 0.0004000431294532838,
"loss": 2.1094,
"step": 1259
},
{
"epoch": 0.58,
"learning_rate": 0.0003993155390709935,
"loss": 2.0215,
"step": 1260
},
{
"epoch": 0.58,
"learning_rate": 0.0003985881708361729,
"loss": 2.1211,
"step": 1261
},
{
"epoch": 0.58,
"learning_rate": 0.00039786102635366784,
"loss": 2.0,
"step": 1262
},
{
"epoch": 0.58,
"learning_rate": 0.0003971341072278302,
"loss": 2.043,
"step": 1263
},
{
"epoch": 0.58,
"learning_rate": 0.00039640741506251457,
"loss": 2.0527,
"step": 1264
},
{
"epoch": 0.58,
"learning_rate": 0.00039568095146107495,
"loss": 2.1484,
"step": 1265
},
{
"epoch": 0.58,
"learning_rate": 0.00039495471802636096,
"loss": 2.0508,
"step": 1266
},
{
"epoch": 0.58,
"learning_rate": 0.0003942287163607145,
"loss": 2.1055,
"step": 1267
},
{
"epoch": 0.58,
"learning_rate": 0.0003935029480659658,
"loss": 2.0957,
"step": 1268
},
{
"epoch": 0.58,
"learning_rate": 0.00039277741474343054,
"loss": 2.0352,
"step": 1269
},
{
"epoch": 0.58,
"learning_rate": 0.0003920521179939057,
"loss": 2.1367,
"step": 1270
},
{
"epoch": 0.58,
"learning_rate": 0.00039132705941766644,
"loss": 2.1191,
"step": 1271
},
{
"epoch": 0.58,
"learning_rate": 0.0003906022406144624,
"loss": 2.0723,
"step": 1272
},
{
"epoch": 0.58,
"learning_rate": 0.0003898776631835143,
"loss": 2.0742,
"step": 1273
},
{
"epoch": 0.58,
"learning_rate": 0.00038915332872350994,
"loss": 2.0566,
"step": 1274
},
{
"epoch": 0.58,
"learning_rate": 0.00038842923883260135,
"loss": 2.0703,
"step": 1275
},
{
"epoch": 0.59,
"learning_rate": 0.00038770539510840093,
"loss": 2.0977,
"step": 1276
},
{
"epoch": 0.59,
"learning_rate": 0.00038698179914797783,
"loss": 1.998,
"step": 1277
},
{
"epoch": 0.59,
"learning_rate": 0.0003862584525478545,
"loss": 2.1992,
"step": 1278
},
{
"epoch": 0.59,
"learning_rate": 0.00038553535690400353,
"loss": 2.1211,
"step": 1279
},
{
"epoch": 0.59,
"learning_rate": 0.00038481251381184355,
"loss": 2.1719,
"step": 1280
},
{
"epoch": 0.59,
"learning_rate": 0.00038408992486623584,
"loss": 2.0508,
"step": 1281
},
{
"epoch": 0.59,
"learning_rate": 0.00038336759166148117,
"loss": 1.998,
"step": 1282
},
{
"epoch": 0.59,
"learning_rate": 0.0003826455157913159,
"loss": 2.1172,
"step": 1283
},
{
"epoch": 0.59,
"learning_rate": 0.00038192369884890886,
"loss": 2.0293,
"step": 1284
},
{
"epoch": 0.59,
"learning_rate": 0.00038120214242685723,
"loss": 2.0332,
"step": 1285
},
{
"epoch": 0.59,
"learning_rate": 0.00038048084811718373,
"loss": 2.1172,
"step": 1286
},
{
"epoch": 0.59,
"learning_rate": 0.0003797598175113327,
"loss": 2.0352,
"step": 1287
},
{
"epoch": 0.59,
"learning_rate": 0.0003790390522001662,
"loss": 2.1523,
"step": 1288
},
{
"epoch": 0.59,
"learning_rate": 0.0003783185537739615,
"loss": 1.9961,
"step": 1289
},
{
"epoch": 0.59,
"learning_rate": 0.00037759832382240697,
"loss": 1.9668,
"step": 1290
},
{
"epoch": 0.59,
"learning_rate": 0.00037687836393459826,
"loss": 2.1211,
"step": 1291
},
{
"epoch": 0.59,
"learning_rate": 0.0003761586756990354,
"loss": 2.0586,
"step": 1292
},
{
"epoch": 0.59,
"learning_rate": 0.0003754392607036191,
"loss": 2.1445,
"step": 1293
},
{
"epoch": 0.59,
"learning_rate": 0.0003747201205356472,
"loss": 1.9688,
"step": 1294
},
{
"epoch": 0.59,
"learning_rate": 0.0003740012567818111,
"loss": 2.1016,
"step": 1295
},
{
"epoch": 0.59,
"learning_rate": 0.0003732826710281922,
"loss": 2.0234,
"step": 1296
},
{
"epoch": 0.59,
"learning_rate": 0.0003725643648602588,
"loss": 2.0254,
"step": 1297
},
{
"epoch": 0.6,
"learning_rate": 0.0003718463398628621,
"loss": 2.1328,
"step": 1298
},
{
"epoch": 0.6,
"learning_rate": 0.0003711285976202331,
"loss": 2.0703,
"step": 1299
},
{
"epoch": 0.6,
"learning_rate": 0.0003704111397159787,
"loss": 2.0527,
"step": 1300
},
{
"epoch": 0.6,
"learning_rate": 0.0003696939677330788,
"loss": 2.0449,
"step": 1301
},
{
"epoch": 0.6,
"learning_rate": 0.00036897708325388213,
"loss": 2.0352,
"step": 1302
},
{
"epoch": 0.6,
"learning_rate": 0.0003682604878601034,
"loss": 2.0195,
"step": 1303
},
{
"epoch": 0.6,
"learning_rate": 0.000367544183132819,
"loss": 2.1367,
"step": 1304
},
{
"epoch": 0.6,
"learning_rate": 0.0003668281706524645,
"loss": 2.0371,
"step": 1305
},
{
"epoch": 0.6,
"learning_rate": 0.0003661124519988304,
"loss": 2.1055,
"step": 1306
},
{
"epoch": 0.6,
"learning_rate": 0.00036539702875105893,
"loss": 2.1211,
"step": 1307
},
{
"epoch": 0.6,
"learning_rate": 0.0003646819024876406,
"loss": 2.0,
"step": 1308
},
{
"epoch": 0.6,
"learning_rate": 0.0003639670747864107,
"loss": 2.1113,
"step": 1309
},
{
"epoch": 0.6,
"learning_rate": 0.00036325254722454584,
"loss": 2.0,
"step": 1310
},
{
"epoch": 0.6,
"learning_rate": 0.00036253832137855997,
"loss": 2.1191,
"step": 1311
},
{
"epoch": 0.6,
"learning_rate": 0.00036182439882430183,
"loss": 2.1055,
"step": 1312
},
{
"epoch": 0.6,
"learning_rate": 0.00036111078113695096,
"loss": 2.0566,
"step": 1313
},
{
"epoch": 0.6,
"learning_rate": 0.0003603974698910139,
"loss": 2.0781,
"step": 1314
},
{
"epoch": 0.6,
"learning_rate": 0.0003596844666603214,
"loss": 2.0508,
"step": 1315
},
{
"epoch": 0.6,
"learning_rate": 0.0003589717730180245,
"loss": 2.0312,
"step": 1316
},
{
"epoch": 0.6,
"learning_rate": 0.00035825939053659117,
"loss": 2.1406,
"step": 1317
},
{
"epoch": 0.6,
"learning_rate": 0.00035754732078780273,
"loss": 2.082,
"step": 1318
},
{
"epoch": 0.6,
"learning_rate": 0.00035683556534275076,
"loss": 2.0469,
"step": 1319
},
{
"epoch": 0.61,
"learning_rate": 0.00035612412577183303,
"loss": 1.998,
"step": 1320
},
{
"epoch": 0.61,
"learning_rate": 0.00035541300364475063,
"loss": 2.0664,
"step": 1321
},
{
"epoch": 0.61,
"learning_rate": 0.0003547022005305043,
"loss": 1.9727,
"step": 1322
},
{
"epoch": 0.61,
"learning_rate": 0.0003539917179973907,
"loss": 2.0566,
"step": 1323
},
{
"epoch": 0.61,
"learning_rate": 0.00035328155761299917,
"loss": 2.0098,
"step": 1324
},
{
"epoch": 0.61,
"learning_rate": 0.0003525717209442085,
"loss": 2.084,
"step": 1325
},
{
"epoch": 0.61,
"learning_rate": 0.00035186220955718306,
"loss": 2.0625,
"step": 1326
},
{
"epoch": 0.61,
"learning_rate": 0.0003511530250173696,
"loss": 2.1445,
"step": 1327
},
{
"epoch": 0.61,
"learning_rate": 0.00035044416888949364,
"loss": 2.0781,
"step": 1328
},
{
"epoch": 0.61,
"learning_rate": 0.0003497356427375562,
"loss": 2.0703,
"step": 1329
},
{
"epoch": 0.61,
"learning_rate": 0.00034902744812483034,
"loss": 2.1289,
"step": 1330
},
{
"epoch": 0.61,
"learning_rate": 0.00034831958661385714,
"loss": 2.1172,
"step": 1331
},
{
"epoch": 0.61,
"learning_rate": 0.0003476120597664434,
"loss": 2.0352,
"step": 1332
},
{
"epoch": 0.61,
"learning_rate": 0.00034690486914365704,
"loss": 2.0996,
"step": 1333
},
{
"epoch": 0.61,
"learning_rate": 0.00034619801630582435,
"loss": 2.0684,
"step": 1334
},
{
"epoch": 0.61,
"learning_rate": 0.00034549150281252633,
"loss": 1.9902,
"step": 1335
},
{
"epoch": 0.61,
"learning_rate": 0.0003447853302225952,
"loss": 2.0879,
"step": 1336
},
{
"epoch": 0.61,
"learning_rate": 0.00034407950009411126,
"loss": 1.9844,
"step": 1337
},
{
"epoch": 0.61,
"learning_rate": 0.00034337401398439873,
"loss": 2.0723,
"step": 1338
},
{
"epoch": 0.61,
"learning_rate": 0.00034266887345002305,
"loss": 2.043,
"step": 1339
},
{
"epoch": 0.61,
"learning_rate": 0.0003419640800467874,
"loss": 2.0801,
"step": 1340
},
{
"epoch": 0.61,
"learning_rate": 0.0003412596353297288,
"loss": 1.9961,
"step": 1341
},
{
"epoch": 0.62,
"learning_rate": 0.00034055554085311493,
"loss": 2.0957,
"step": 1342
},
{
"epoch": 0.62,
"learning_rate": 0.00033985179817044105,
"loss": 2.0586,
"step": 1343
},
{
"epoch": 0.62,
"learning_rate": 0.0003391484088344257,
"loss": 2.1172,
"step": 1344
},
{
"epoch": 0.62,
"learning_rate": 0.00033844537439700807,
"loss": 2.0488,
"step": 1345
},
{
"epoch": 0.62,
"learning_rate": 0.00033774269640934445,
"loss": 2.0254,
"step": 1346
},
{
"epoch": 0.62,
"learning_rate": 0.0003370403764218045,
"loss": 2.0527,
"step": 1347
},
{
"epoch": 0.62,
"learning_rate": 0.000336338415983968,
"loss": 2.0781,
"step": 1348
},
{
"epoch": 0.62,
"learning_rate": 0.00033563681664462155,
"loss": 2.0059,
"step": 1349
},
{
"epoch": 0.62,
"learning_rate": 0.000334935579951755,
"loss": 2.0117,
"step": 1350
},
{
"epoch": 0.62,
"learning_rate": 0.0003342347074525578,
"loss": 2.1055,
"step": 1351
},
{
"epoch": 0.62,
"learning_rate": 0.0003335342006934161,
"loss": 2.082,
"step": 1352
},
{
"epoch": 0.62,
"learning_rate": 0.00033283406121990914,
"loss": 2.0176,
"step": 1353
},
{
"epoch": 0.62,
"learning_rate": 0.0003321342905768057,
"loss": 2.0391,
"step": 1354
},
{
"epoch": 0.62,
"learning_rate": 0.00033143489030806086,
"loss": 2.0742,
"step": 1355
},
{
"epoch": 0.62,
"learning_rate": 0.00033073586195681227,
"loss": 2.1406,
"step": 1356
},
{
"epoch": 0.62,
"learning_rate": 0.00033003720706537736,
"loss": 2.0,
"step": 1357
},
{
"epoch": 0.62,
"learning_rate": 0.0003293389271752492,
"loss": 2.0,
"step": 1358
},
{
"epoch": 0.62,
"learning_rate": 0.00032864102382709374,
"loss": 2.0117,
"step": 1359
},
{
"epoch": 0.62,
"learning_rate": 0.000327943498560746,
"loss": 2.0215,
"step": 1360
},
{
"epoch": 0.62,
"learning_rate": 0.00032724635291520694,
"loss": 2.0645,
"step": 1361
},
{
"epoch": 0.62,
"learning_rate": 0.00032654958842863967,
"loss": 2.084,
"step": 1362
},
{
"epoch": 0.62,
"learning_rate": 0.0003258532066383667,
"loss": 2.0938,
"step": 1363
},
{
"epoch": 0.63,
"learning_rate": 0.000325157209080866,
"loss": 2.0469,
"step": 1364
},
{
"epoch": 0.63,
"learning_rate": 0.00032446159729176743,
"loss": 2.0449,
"step": 1365
},
{
"epoch": 0.63,
"learning_rate": 0.0003237663728058502,
"loss": 2.2305,
"step": 1366
},
{
"epoch": 0.63,
"learning_rate": 0.0003230715371570389,
"loss": 2.0645,
"step": 1367
},
{
"epoch": 0.63,
"learning_rate": 0.00032237709187839996,
"loss": 2.0527,
"step": 1368
},
{
"epoch": 0.63,
"learning_rate": 0.0003216830385021388,
"loss": 1.9746,
"step": 1369
},
{
"epoch": 0.63,
"learning_rate": 0.0003209893785595959,
"loss": 2.0,
"step": 1370
},
{
"epoch": 0.63,
"learning_rate": 0.00032029611358124366,
"loss": 1.9688,
"step": 1371
},
{
"epoch": 0.63,
"learning_rate": 0.00031960324509668336,
"loss": 2.1348,
"step": 1372
},
{
"epoch": 0.63,
"learning_rate": 0.0003189107746346412,
"loss": 2.0977,
"step": 1373
},
{
"epoch": 0.63,
"learning_rate": 0.0003182187037229653,
"loss": 2.0078,
"step": 1374
},
{
"epoch": 0.63,
"learning_rate": 0.0003175270338886221,
"loss": 2.0547,
"step": 1375
},
{
"epoch": 0.63,
"learning_rate": 0.00031683576665769345,
"loss": 2.082,
"step": 1376
},
{
"epoch": 0.63,
"learning_rate": 0.0003161449035553724,
"loss": 2.1113,
"step": 1377
},
{
"epoch": 0.63,
"learning_rate": 0.00031545444610596077,
"loss": 2.041,
"step": 1378
},
{
"epoch": 0.63,
"learning_rate": 0.000314764395832865,
"loss": 2.0977,
"step": 1379
},
{
"epoch": 0.63,
"learning_rate": 0.0003140747542585934,
"loss": 2.0781,
"step": 1380
},
{
"epoch": 0.63,
"learning_rate": 0.00031338552290475266,
"loss": 2.0469,
"step": 1381
},
{
"epoch": 0.63,
"learning_rate": 0.00031269670329204396,
"loss": 2.1094,
"step": 1382
},
{
"epoch": 0.63,
"learning_rate": 0.0003120082969402604,
"loss": 2.0781,
"step": 1383
},
{
"epoch": 0.63,
"learning_rate": 0.00031132030536828314,
"loss": 2.0293,
"step": 1384
},
{
"epoch": 0.64,
"learning_rate": 0.00031063273009407805,
"loss": 1.9395,
"step": 1385
},
{
"epoch": 0.64,
"learning_rate": 0.00030994557263469265,
"loss": 2.082,
"step": 1386
},
{
"epoch": 0.64,
"learning_rate": 0.0003092588345062526,
"loss": 2.1289,
"step": 1387
},
{
"epoch": 0.64,
"learning_rate": 0.0003085725172239582,
"loss": 2.0488,
"step": 1388
},
{
"epoch": 0.64,
"learning_rate": 0.0003078866223020815,
"loss": 2.0137,
"step": 1389
},
{
"epoch": 0.64,
"learning_rate": 0.0003072011512539624,
"loss": 2.1172,
"step": 1390
},
{
"epoch": 0.64,
"learning_rate": 0.00030651610559200574,
"loss": 2.0879,
"step": 1391
},
{
"epoch": 0.64,
"learning_rate": 0.00030583148682767757,
"loss": 2.0449,
"step": 1392
},
{
"epoch": 0.64,
"learning_rate": 0.00030514729647150243,
"loss": 2.0664,
"step": 1393
},
{
"epoch": 0.64,
"learning_rate": 0.0003044635360330592,
"loss": 2.0449,
"step": 1394
},
{
"epoch": 0.64,
"learning_rate": 0.00030378020702097845,
"loss": 2.1387,
"step": 1395
},
{
"epoch": 0.64,
"learning_rate": 0.000303097310942939,
"loss": 2.0273,
"step": 1396
},
{
"epoch": 0.64,
"learning_rate": 0.0003024148493056641,
"loss": 2.0703,
"step": 1397
},
{
"epoch": 0.64,
"learning_rate": 0.00030173282361491865,
"loss": 2.0645,
"step": 1398
},
{
"epoch": 0.64,
"learning_rate": 0.0003010512353755057,
"loss": 2.0586,
"step": 1399
},
{
"epoch": 0.64,
"learning_rate": 0.00030037008609126313,
"loss": 2.0703,
"step": 1400
},
{
"epoch": 0.64,
"learning_rate": 0.0002996893772650602,
"loss": 1.9883,
"step": 1401
},
{
"epoch": 0.64,
"learning_rate": 0.0002990091103987945,
"loss": 2.0977,
"step": 1402
},
{
"epoch": 0.64,
"learning_rate": 0.0002983292869933886,
"loss": 2.043,
"step": 1403
},
{
"epoch": 0.64,
"learning_rate": 0.0002976499085487862,
"loss": 2.0664,
"step": 1404
},
{
"epoch": 0.64,
"learning_rate": 0.00029697097656394963,
"loss": 2.0059,
"step": 1405
},
{
"epoch": 0.64,
"learning_rate": 0.00029629249253685595,
"loss": 2.0938,
"step": 1406
},
{
"epoch": 0.65,
"learning_rate": 0.00029561445796449416,
"loss": 2.0586,
"step": 1407
},
{
"epoch": 0.65,
"learning_rate": 0.0002949368743428612,
"loss": 2.0586,
"step": 1408
},
{
"epoch": 0.65,
"learning_rate": 0.0002942597431669593,
"loss": 2.0078,
"step": 1409
},
{
"epoch": 0.65,
"learning_rate": 0.0002935830659307924,
"loss": 2.0117,
"step": 1410
},
{
"epoch": 0.65,
"learning_rate": 0.0002929068441273629,
"loss": 2.1055,
"step": 1411
},
{
"epoch": 0.65,
"learning_rate": 0.0002922310792486681,
"loss": 2.0859,
"step": 1412
},
{
"epoch": 0.65,
"learning_rate": 0.00029155577278569745,
"loss": 1.9961,
"step": 1413
},
{
"epoch": 0.65,
"learning_rate": 0.00029088092622842895,
"loss": 2.0977,
"step": 1414
},
{
"epoch": 0.65,
"learning_rate": 0.00029020654106582544,
"loss": 2.0508,
"step": 1415
},
{
"epoch": 0.65,
"learning_rate": 0.0002895326187858326,
"loss": 1.998,
"step": 1416
},
{
"epoch": 0.65,
"learning_rate": 0.00028885916087537377,
"loss": 2.0742,
"step": 1417
},
{
"epoch": 0.65,
"learning_rate": 0.00028818616882034877,
"loss": 2.043,
"step": 1418
},
{
"epoch": 0.65,
"learning_rate": 0.0002875136441056286,
"loss": 2.0469,
"step": 1419
},
{
"epoch": 0.65,
"learning_rate": 0.000286841588215054,
"loss": 2.0449,
"step": 1420
},
{
"epoch": 0.65,
"learning_rate": 0.0002861700026314308,
"loss": 2.0703,
"step": 1421
},
{
"epoch": 0.65,
"learning_rate": 0.00028549888883652686,
"loss": 2.1758,
"step": 1422
},
{
"epoch": 0.65,
"learning_rate": 0.00028482824831107,
"loss": 2.0898,
"step": 1423
},
{
"epoch": 0.65,
"learning_rate": 0.000284158082534743,
"loss": 2.0273,
"step": 1424
},
{
"epoch": 0.65,
"learning_rate": 0.00028348839298618177,
"loss": 2.0859,
"step": 1425
},
{
"epoch": 0.65,
"learning_rate": 0.0002828191811429709,
"loss": 2.1094,
"step": 1426
},
{
"epoch": 0.65,
"learning_rate": 0.00028215044848164164,
"loss": 1.9922,
"step": 1427
},
{
"epoch": 0.65,
"learning_rate": 0.00028148219647766747,
"loss": 2.043,
"step": 1428
},
{
"epoch": 0.66,
"learning_rate": 0.00028081442660546124,
"loss": 2.043,
"step": 1429
},
{
"epoch": 0.66,
"learning_rate": 0.0002801471403383728,
"loss": 1.9551,
"step": 1430
},
{
"epoch": 0.66,
"learning_rate": 0.00027948033914868415,
"loss": 2.0605,
"step": 1431
},
{
"epoch": 0.66,
"learning_rate": 0.00027881402450760775,
"loss": 1.9902,
"step": 1432
},
{
"epoch": 0.66,
"learning_rate": 0.00027814819788528165,
"loss": 2.0234,
"step": 1433
},
{
"epoch": 0.66,
"learning_rate": 0.00027748286075076836,
"loss": 2.0859,
"step": 1434
},
{
"epoch": 0.66,
"learning_rate": 0.00027681801457204937,
"loss": 2.0156,
"step": 1435
},
{
"epoch": 0.66,
"learning_rate": 0.00027615366081602306,
"loss": 2.0273,
"step": 1436
},
{
"epoch": 0.66,
"learning_rate": 0.0002754898009485021,
"loss": 2.168,
"step": 1437
},
{
"epoch": 0.66,
"learning_rate": 0.0002748264364342085,
"loss": 2.0,
"step": 1438
},
{
"epoch": 0.66,
"learning_rate": 0.00027416356873677204,
"loss": 2.0469,
"step": 1439
},
{
"epoch": 0.66,
"learning_rate": 0.0002735011993187258,
"loss": 2.0254,
"step": 1440
},
{
"epoch": 0.66,
"learning_rate": 0.0002728393296415042,
"loss": 2.0938,
"step": 1441
},
{
"epoch": 0.66,
"learning_rate": 0.00027217796116543817,
"loss": 1.9785,
"step": 1442
},
{
"epoch": 0.66,
"learning_rate": 0.0002715170953497532,
"loss": 2.0508,
"step": 1443
},
{
"epoch": 0.66,
"learning_rate": 0.00027085673365256614,
"loss": 2.0312,
"step": 1444
},
{
"epoch": 0.66,
"learning_rate": 0.00027019687753088075,
"loss": 2.0254,
"step": 1445
},
{
"epoch": 0.66,
"learning_rate": 0.00026953752844058597,
"loss": 2.0977,
"step": 1446
},
{
"epoch": 0.66,
"learning_rate": 0.0002688786878364516,
"loss": 2.041,
"step": 1447
},
{
"epoch": 0.66,
"learning_rate": 0.00026822035717212597,
"loss": 2.0742,
"step": 1448
},
{
"epoch": 0.66,
"learning_rate": 0.00026756253790013193,
"loss": 2.0566,
"step": 1449
},
{
"epoch": 0.66,
"learning_rate": 0.0002669052314718641,
"loss": 2.1133,
"step": 1450
},
{
"epoch": 0.67,
"learning_rate": 0.0002662484393375855,
"loss": 2.1055,
"step": 1451
},
{
"epoch": 0.67,
"learning_rate": 0.00026559216294642446,
"loss": 2.0977,
"step": 1452
},
{
"epoch": 0.67,
"learning_rate": 0.0002649364037463718,
"loss": 2.0977,
"step": 1453
},
{
"epoch": 0.67,
"learning_rate": 0.0002642811631842764,
"loss": 2.0527,
"step": 1454
},
{
"epoch": 0.67,
"learning_rate": 0.0002636264427058439,
"loss": 1.9746,
"step": 1455
},
{
"epoch": 0.67,
"learning_rate": 0.00026297224375563123,
"loss": 2.0586,
"step": 1456
},
{
"epoch": 0.67,
"learning_rate": 0.00026231856777704575,
"loss": 1.9961,
"step": 1457
},
{
"epoch": 0.67,
"learning_rate": 0.00026166541621234026,
"loss": 2.0273,
"step": 1458
},
{
"epoch": 0.67,
"learning_rate": 0.00026101279050261045,
"loss": 2.0664,
"step": 1459
},
{
"epoch": 0.67,
"learning_rate": 0.00026036069208779247,
"loss": 2.125,
"step": 1460
},
{
"epoch": 0.67,
"learning_rate": 0.0002597091224066581,
"loss": 2.0488,
"step": 1461
},
{
"epoch": 0.67,
"learning_rate": 0.00025905808289681365,
"loss": 1.9844,
"step": 1462
},
{
"epoch": 0.67,
"learning_rate": 0.0002584075749946946,
"loss": 2.0352,
"step": 1463
},
{
"epoch": 0.67,
"learning_rate": 0.00025775760013556424,
"loss": 2.043,
"step": 1464
},
{
"epoch": 0.67,
"learning_rate": 0.0002571081597535095,
"loss": 2.0215,
"step": 1465
},
{
"epoch": 0.67,
"learning_rate": 0.00025645925528143776,
"loss": 2.0332,
"step": 1466
},
{
"epoch": 0.67,
"learning_rate": 0.0002558108881510747,
"loss": 2.0234,
"step": 1467
},
{
"epoch": 0.67,
"learning_rate": 0.00025516305979295963,
"loss": 2.082,
"step": 1468
},
{
"epoch": 0.67,
"learning_rate": 0.0002545157716364439,
"loss": 2.0645,
"step": 1469
},
{
"epoch": 0.67,
"learning_rate": 0.00025386902510968624,
"loss": 2.0117,
"step": 1470
},
{
"epoch": 0.67,
"learning_rate": 0.00025322282163965095,
"loss": 2.0898,
"step": 1471
},
{
"epoch": 0.67,
"learning_rate": 0.00025257716265210384,
"loss": 2.041,
"step": 1472
},
{
"epoch": 0.68,
"learning_rate": 0.0002519320495716091,
"loss": 2.125,
"step": 1473
},
{
"epoch": 0.68,
"learning_rate": 0.00025128748382152716,
"loss": 2.0469,
"step": 1474
},
{
"epoch": 0.68,
"learning_rate": 0.00025064346682401016,
"loss": 2.1016,
"step": 1475
},
{
"epoch": 0.68,
"learning_rate": 0.0002500000000000001,
"loss": 1.9941,
"step": 1476
},
{
"epoch": 0.68,
"learning_rate": 0.0002493570847692246,
"loss": 2.1367,
"step": 1477
},
{
"epoch": 0.68,
"learning_rate": 0.00024871472255019424,
"loss": 2.0684,
"step": 1478
},
{
"epoch": 0.68,
"learning_rate": 0.00024807291476019994,
"loss": 2.0859,
"step": 1479
},
{
"epoch": 0.68,
"learning_rate": 0.00024743166281530877,
"loss": 1.9688,
"step": 1480
},
{
"epoch": 0.68,
"learning_rate": 0.000246790968130362,
"loss": 2.0156,
"step": 1481
},
{
"epoch": 0.68,
"learning_rate": 0.0002461508321189706,
"loss": 1.9629,
"step": 1482
},
{
"epoch": 0.68,
"learning_rate": 0.00024551125619351385,
"loss": 2.0762,
"step": 1483
},
{
"epoch": 0.68,
"learning_rate": 0.00024487224176513453,
"loss": 2.043,
"step": 1484
},
{
"epoch": 0.68,
"learning_rate": 0.0002442337902437365,
"loss": 1.9395,
"step": 1485
},
{
"epoch": 0.68,
"learning_rate": 0.0002435959030379824,
"loss": 2.168,
"step": 1486
},
{
"epoch": 0.68,
"learning_rate": 0.00024295858155528888,
"loss": 2.0234,
"step": 1487
},
{
"epoch": 0.68,
"learning_rate": 0.00024232182720182523,
"loss": 2.1055,
"step": 1488
},
{
"epoch": 0.68,
"learning_rate": 0.00024168564138250855,
"loss": 2.0742,
"step": 1489
},
{
"epoch": 0.68,
"learning_rate": 0.00024105002550100246,
"loss": 2.0586,
"step": 1490
},
{
"epoch": 0.68,
"learning_rate": 0.00024041498095971254,
"loss": 2.0625,
"step": 1491
},
{
"epoch": 0.68,
"learning_rate": 0.0002397805091597835,
"loss": 2.0488,
"step": 1492
},
{
"epoch": 0.68,
"learning_rate": 0.0002391466115010973,
"loss": 2.1172,
"step": 1493
},
{
"epoch": 0.69,
"learning_rate": 0.00023851328938226808,
"loss": 2.0293,
"step": 1494
},
{
"epoch": 0.69,
"learning_rate": 0.00023788054420064109,
"loss": 2.0762,
"step": 1495
},
{
"epoch": 0.69,
"learning_rate": 0.00023724837735228773,
"loss": 2.0859,
"step": 1496
},
{
"epoch": 0.69,
"learning_rate": 0.00023661679023200422,
"loss": 1.9531,
"step": 1497
},
{
"epoch": 0.69,
"learning_rate": 0.00023598578423330714,
"loss": 2.0254,
"step": 1498
},
{
"epoch": 0.69,
"learning_rate": 0.00023535536074843083,
"loss": 2.0,
"step": 1499
},
{
"epoch": 0.69,
"learning_rate": 0.00023472552116832502,
"loss": 2.0703,
"step": 1500
},
{
"epoch": 0.69,
"learning_rate": 0.0002340962668826503,
"loss": 2.0195,
"step": 1501
},
{
"epoch": 0.69,
"learning_rate": 0.00023346759927977663,
"loss": 2.0645,
"step": 1502
},
{
"epoch": 0.69,
"learning_rate": 0.0002328395197467789,
"loss": 2.0703,
"step": 1503
},
{
"epoch": 0.69,
"learning_rate": 0.00023221202966943515,
"loss": 2.0293,
"step": 1504
},
{
"epoch": 0.69,
"learning_rate": 0.0002315851304322223,
"loss": 2.1328,
"step": 1505
},
{
"epoch": 0.69,
"learning_rate": 0.0002309588234183137,
"loss": 2.0547,
"step": 1506
},
{
"epoch": 0.69,
"learning_rate": 0.00023033311000957653,
"loss": 2.0,
"step": 1507
},
{
"epoch": 0.69,
"learning_rate": 0.00022970799158656758,
"loss": 1.9902,
"step": 1508
},
{
"epoch": 0.69,
"learning_rate": 0.0002290834695285316,
"loss": 2.125,
"step": 1509
},
{
"epoch": 0.69,
"learning_rate": 0.00022845954521339678,
"loss": 2.0352,
"step": 1510
},
{
"epoch": 0.69,
"learning_rate": 0.0002278362200177732,
"loss": 2.043,
"step": 1511
},
{
"epoch": 0.69,
"learning_rate": 0.00022721349531694852,
"loss": 2.0547,
"step": 1512
},
{
"epoch": 0.69,
"learning_rate": 0.0002265913724848855,
"loss": 2.082,
"step": 1513
},
{
"epoch": 0.69,
"learning_rate": 0.00022596985289421946,
"loss": 1.9688,
"step": 1514
},
{
"epoch": 0.69,
"learning_rate": 0.00022534893791625405,
"loss": 1.998,
"step": 1515
},
{
"epoch": 0.7,
"learning_rate": 0.00022472862892095968,
"loss": 2.0859,
"step": 1516
},
{
"epoch": 0.7,
"learning_rate": 0.00022410892727696896,
"loss": 2.0371,
"step": 1517
},
{
"epoch": 0.7,
"learning_rate": 0.0002234898343515751,
"loss": 2.0684,
"step": 1518
},
{
"epoch": 0.7,
"learning_rate": 0.00022287135151072792,
"loss": 2.0176,
"step": 1519
},
{
"epoch": 0.7,
"learning_rate": 0.00022225348011903096,
"loss": 2.1094,
"step": 1520
},
{
"epoch": 0.7,
"learning_rate": 0.0002216362215397393,
"loss": 2.0762,
"step": 1521
},
{
"epoch": 0.7,
"learning_rate": 0.00022101957713475522,
"loss": 2.0098,
"step": 1522
},
{
"epoch": 0.7,
"learning_rate": 0.00022040354826462666,
"loss": 2.041,
"step": 1523
},
{
"epoch": 0.7,
"learning_rate": 0.0002197881362885426,
"loss": 2.082,
"step": 1524
},
{
"epoch": 0.7,
"learning_rate": 0.0002191733425643318,
"loss": 2.0176,
"step": 1525
},
{
"epoch": 0.7,
"learning_rate": 0.00021855916844845826,
"loss": 2.0234,
"step": 1526
},
{
"epoch": 0.7,
"learning_rate": 0.00021794561529601898,
"loss": 1.9551,
"step": 1527
},
{
"epoch": 0.7,
"learning_rate": 0.00021733268446074138,
"loss": 1.9629,
"step": 1528
},
{
"epoch": 0.7,
"learning_rate": 0.00021672037729497917,
"loss": 2.0859,
"step": 1529
},
{
"epoch": 0.7,
"learning_rate": 0.0002161086951497106,
"loss": 2.0449,
"step": 1530
},
{
"epoch": 0.7,
"learning_rate": 0.00021549763937453442,
"loss": 2.0293,
"step": 1531
},
{
"epoch": 0.7,
"learning_rate": 0.00021488721131766736,
"loss": 2.1172,
"step": 1532
},
{
"epoch": 0.7,
"learning_rate": 0.00021427741232594183,
"loss": 2.0996,
"step": 1533
},
{
"epoch": 0.7,
"learning_rate": 0.0002136682437448013,
"loss": 2.1445,
"step": 1534
},
{
"epoch": 0.7,
"learning_rate": 0.0002130597069182994,
"loss": 2.1055,
"step": 1535
},
{
"epoch": 0.7,
"learning_rate": 0.0002124518031890948,
"loss": 2.0645,
"step": 1536
},
{
"epoch": 0.7,
"learning_rate": 0.0002118445338984502,
"loss": 1.9375,
"step": 1537
},
{
"epoch": 0.71,
"learning_rate": 0.00021123790038622808,
"loss": 2.0156,
"step": 1538
},
{
"epoch": 0.71,
"learning_rate": 0.0002106319039908879,
"loss": 2.0469,
"step": 1539
},
{
"epoch": 0.71,
"learning_rate": 0.00021002654604948412,
"loss": 2.0332,
"step": 1540
},
{
"epoch": 0.71,
"learning_rate": 0.00020942182789766172,
"loss": 2.0781,
"step": 1541
},
{
"epoch": 0.71,
"learning_rate": 0.00020881775086965492,
"loss": 2.0254,
"step": 1542
},
{
"epoch": 0.71,
"learning_rate": 0.00020821431629828246,
"loss": 2.0586,
"step": 1543
},
{
"epoch": 0.71,
"learning_rate": 0.00020761152551494643,
"loss": 2.0703,
"step": 1544
},
{
"epoch": 0.71,
"learning_rate": 0.00020700937984962798,
"loss": 2.0938,
"step": 1545
},
{
"epoch": 0.71,
"learning_rate": 0.0002064078806308848,
"loss": 1.9922,
"step": 1546
},
{
"epoch": 0.71,
"learning_rate": 0.00020580702918584882,
"loss": 2.002,
"step": 1547
},
{
"epoch": 0.71,
"learning_rate": 0.000205206826840222,
"loss": 2.082,
"step": 1548
},
{
"epoch": 0.71,
"learning_rate": 0.0002046072749182751,
"loss": 2.0566,
"step": 1549
},
{
"epoch": 0.71,
"learning_rate": 0.00020400837474284273,
"loss": 2.0586,
"step": 1550
},
{
"epoch": 0.71,
"learning_rate": 0.0002034101276353224,
"loss": 2.0703,
"step": 1551
},
{
"epoch": 0.71,
"learning_rate": 0.00020281253491567027,
"loss": 2.0625,
"step": 1552
},
{
"epoch": 0.71,
"learning_rate": 0.0002022155979023984,
|
|
"loss": 2.084,
|
|
"step": 1553
|
|
},
|
|
{
|
|
"epoch": 0.71,
|
|
"learning_rate": 0.000201619317912573,
|
|
"loss": 2.0957,
|
|
"step": 1554
|
|
},
|
|
{
|
|
"epoch": 0.71,
|
|
"learning_rate": 0.00020102369626180962,
|
|
"loss": 2.0273,
|
|
"step": 1555
|
|
},
|
|
{
|
|
"epoch": 0.71,
|
|
"learning_rate": 0.0002004287342642721,
|
|
"loss": 2.041,
|
|
"step": 1556
|
|
},
|
|
{
|
|
"epoch": 0.71,
|
|
"learning_rate": 0.00019983443323266824,
|
|
"loss": 2.0449,
|
|
"step": 1557
|
|
},
|
|
{
|
|
"epoch": 0.71,
|
|
"learning_rate": 0.00019924079447824805,
|
|
"loss": 2.0859,
|
|
"step": 1558
|
|
},
|
|
{
|
|
"epoch": 0.71,
|
|
"learning_rate": 0.00019864781931079977,
|
|
"loss": 2.0078,
|
|
"step": 1559
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019805550903864773,
|
|
"loss": 2.1211,
|
|
"step": 1560
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019746386496864948,
|
|
"loss": 2.0059,
|
|
"step": 1561
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019687288840619226,
|
|
"loss": 2.0391,
|
|
"step": 1562
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.0001962825806551911,
|
|
"loss": 2.0586,
|
|
"step": 1563
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.0001956929430180846,
|
|
"loss": 2.0586,
|
|
"step": 1564
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019510397679583374,
|
|
"loss": 2.0859,
|
|
"step": 1565
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.0001945156832879174,
|
|
"loss": 2.0488,
|
|
"step": 1566
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019392806379233036,
|
|
"loss": 1.9512,
|
|
"step": 1567
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019334111960558065,
|
|
"loss": 2.0625,
|
|
"step": 1568
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019275485202268573,
|
|
"loss": 2.1367,
|
|
"step": 1569
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019216926233717085,
|
|
"loss": 2.0312,
|
|
"step": 1570
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019158435184106498,
|
|
"loss": 2.0664,
|
|
"step": 1571
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019100012182489905,
|
|
"loss": 2.0605,
|
|
"step": 1572
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00019041657357770226,
|
|
"loss": 2.0332,
|
|
"step": 1573
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00018983370838699943,
|
|
"loss": 2.0664,
|
|
"step": 1574
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00018925152753880892,
|
|
"loss": 2.0742,
|
|
"step": 1575
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00018867003231763847,
|
|
"loss": 2.1406,
|
|
"step": 1576
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00018808922400648375,
|
|
"loss": 2.1172,
|
|
"step": 1577
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00018750910388682428,
|
|
"loss": 2.0938,
|
|
"step": 1578
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00018692967323862125,
|
|
"loss": 2.0488,
|
|
"step": 1579
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00018635093334031517,
|
|
"loss": 2.0547,
|
|
"step": 1580
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.00018577288546882165,
|
|
"loss": 1.9863,
|
|
"step": 1581
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.00018519553089953023,
|
|
"loss": 2.0332,
|
|
"step": 1582
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.0001846188709063001,
|
|
"loss": 2.0059,
|
|
"step": 1583
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.00018404290676145857,
|
|
"loss": 2.0312,
|
|
"step": 1584
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.00018346763973579722,
|
|
"loss": 2.0078,
|
|
"step": 1585
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.00018289307109856939,
|
|
"loss": 2.1367,
|
|
"step": 1586
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.0001823192021174882,
|
|
"loss": 2.0586,
|
|
"step": 1587
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.0001817460340587223,
|
|
"loss": 2.0469,
|
|
"step": 1588
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.00018117356818689445,
|
|
"loss": 2.0957,
|
|
"step": 1589
|
|
},
|
|
{
"epoch": 0.73,
"learning_rate": 0.00018060180576507756,
"loss": 2.0234,
"step": 1590
},
{
"epoch": 0.73,
"learning_rate": 0.00018003074805479313,
"loss": 1.9609,
"step": 1591
},
{
"epoch": 0.73,
"learning_rate": 0.00017946039631600724,
"loss": 2.082,
"step": 1592
},
{
"epoch": 0.73,
"learning_rate": 0.00017889075180712837,
"loss": 2.082,
"step": 1593
},
{
"epoch": 0.73,
"learning_rate": 0.00017832181578500512,
"loss": 2.0547,
"step": 1594
},
{
"epoch": 0.73,
"learning_rate": 0.0001777535895049221,
"loss": 1.9961,
"step": 1595
},
{
"epoch": 0.73,
"learning_rate": 0.0001771860742205988,
"loss": 2.0234,
"step": 1596
},
{
"epoch": 0.73,
"learning_rate": 0.00017661927118418525,
"loss": 2.082,
"step": 1597
},
{
"epoch": 0.73,
"learning_rate": 0.00017605318164626066,
"loss": 2.1055,
"step": 1598
},
{
"epoch": 0.73,
"learning_rate": 0.00017548780685582949,
"loss": 2.1289,
"step": 1599
},
{
"epoch": 0.73,
"learning_rate": 0.00017492314806031922,
"loss": 2.1406,
"step": 1600
},
{
"epoch": 0.73,
"learning_rate": 0.00017435920650557806,
"loss": 2.0332,
"step": 1601
},
{
"epoch": 0.73,
"learning_rate": 0.00017379598343587112,
"loss": 2.0234,
"step": 1602
},
{
"epoch": 0.73,
"learning_rate": 0.00017323348009387878,
"loss": 1.9941,
"step": 1603
},
{
"epoch": 0.74,
"learning_rate": 0.0001726716977206929,
"loss": 2.0078,
"step": 1604
},
{
"epoch": 0.74,
"learning_rate": 0.00017211063755581525,
"loss": 1.9668,
"step": 1605
},
{
"epoch": 0.74,
"learning_rate": 0.0001715503008371536,
"loss": 2.043,
"step": 1606
},
{
"epoch": 0.74,
"learning_rate": 0.0001709906888010196,
"loss": 1.9434,
"step": 1607
},
{
"epoch": 0.74,
"learning_rate": 0.00017043180268212638,
"loss": 2.125,
"step": 1608
},
{
"epoch": 0.74,
"learning_rate": 0.00016987364371358481,
"loss": 2.0449,
"step": 1609
},
{
"epoch": 0.74,
"learning_rate": 0.00016931621312690214,
"loss": 2.0156,
"step": 1610
},
{
"epoch": 0.74,
"learning_rate": 0.00016875951215197777,
"loss": 2.0742,
"step": 1611
},
{
"epoch": 0.74,
"learning_rate": 0.00016820354201710214,
"loss": 2.0938,
"step": 1612
},
{
"epoch": 0.74,
"learning_rate": 0.00016764830394895203,
"loss": 1.9238,
"step": 1613
},
{
"epoch": 0.74,
"learning_rate": 0.00016709379917259027,
"loss": 2.0137,
"step": 1614
},
{
"epoch": 0.74,
"learning_rate": 0.00016654002891146091,
"loss": 2.0859,
"step": 1615
},
{
"epoch": 0.74,
"learning_rate": 0.00016598699438738764,
"loss": 2.082,
"step": 1616
},
{
"epoch": 0.74,
"learning_rate": 0.00016543469682057105,
"loss": 2.1484,
"step": 1617
},
{
"epoch": 0.74,
"learning_rate": 0.00016488313742958526,
"loss": 2.1367,
"step": 1618
},
{
"epoch": 0.74,
"learning_rate": 0.00016433231743137646,
"loss": 1.9629,
"step": 1619
},
{
"epoch": 0.74,
"learning_rate": 0.0001637822380412584,
"loss": 2.0508,
"step": 1620
},
{
"epoch": 0.74,
"learning_rate": 0.00016323290047291195,
"loss": 2.0391,
"step": 1621
},
{
"epoch": 0.74,
"learning_rate": 0.0001626843059383803,
"loss": 2.0977,
"step": 1622
},
{
"epoch": 0.74,
"learning_rate": 0.00016213645564806752,
"loss": 2.0898,
"step": 1623
},
{
"epoch": 0.74,
"learning_rate": 0.0001615893508107359,
"loss": 2.0,
"step": 1624
},
{
"epoch": 0.75,
"learning_rate": 0.00016104299263350252,
"loss": 2.0547,
"step": 1625
},
{
"epoch": 0.75,
"learning_rate": 0.00016049738232183758,
"loss": 2.0352,
"step": 1626
},
{
"epoch": 0.75,
"learning_rate": 0.0001599525210795606,
"loss": 2.0469,
"step": 1627
},
{
"epoch": 0.75,
"learning_rate": 0.00015940841010883889,
"loss": 2.0137,
"step": 1628
},
{
"epoch": 0.75,
"learning_rate": 0.00015886505061018413,
"loss": 1.9863,
"step": 1629
},
{
"epoch": 0.75,
"learning_rate": 0.0001583224437824498,
"loss": 2.0137,
"step": 1630
},
{
"epoch": 0.75,
"learning_rate": 0.0001577805908228293,
"loss": 2.0078,
"step": 1631
},
{
"epoch": 0.75,
"learning_rate": 0.00015723949292685191,
"loss": 1.9902,
"step": 1632
},
{
"epoch": 0.75,
"learning_rate": 0.0001566991512883818,
"loss": 1.9746,
"step": 1633
},
{
"epoch": 0.75,
"learning_rate": 0.00015615956709961378,
"loss": 1.9688,
"step": 1634
},
{
"epoch": 0.75,
"learning_rate": 0.00015562074155107215,
"loss": 2.0273,
"step": 1635
},
{
"epoch": 0.75,
"learning_rate": 0.0001550826758316068,
"loss": 2.0352,
"step": 1636
},
{
"epoch": 0.75,
"learning_rate": 0.00015454537112839122,
"loss": 1.9902,
"step": 1637
},
{
"epoch": 0.75,
"learning_rate": 0.00015400882862692033,
"loss": 2.0566,
"step": 1638
},
{
"epoch": 0.75,
"learning_rate": 0.00015347304951100665,
"loss": 2.0078,
"step": 1639
},
{
"epoch": 0.75,
"learning_rate": 0.00015293803496277907,
"loss": 2.0645,
"step": 1640
},
{
"epoch": 0.75,
"learning_rate": 0.00015240378616267886,
"loss": 2.0215,
"step": 1641
},
{
"epoch": 0.75,
"learning_rate": 0.00015187030428945843,
"loss": 1.959,
"step": 1642
},
{
"epoch": 0.75,
"learning_rate": 0.0001513375905201776,
"loss": 2.1172,
"step": 1643
},
{
"epoch": 0.75,
"learning_rate": 0.00015080564603020142,
"loss": 2.1133,
"step": 1644
},
{
"epoch": 0.75,
"learning_rate": 0.0001502744719931982,
"loss": 2.0859,
"step": 1645
},
{
"epoch": 0.75,
"learning_rate": 0.00014974406958113558,
"loss": 2.0254,
"step": 1646
},
{
"epoch": 0.76,
"learning_rate": 0.00014921443996427947,
"loss": 1.9824,
"step": 1647
},
{
"epoch": 0.76,
"learning_rate": 0.0001486855843111901,
"loss": 2.0879,
"step": 1648
},
{
"epoch": 0.76,
"learning_rate": 0.0001481575037887201,
"loss": 2.082,
"step": 1649
},
{
"epoch": 0.76,
"learning_rate": 0.00014763019956201253,
"loss": 2.0371,
"step": 1650
},
{
"epoch": 0.76,
"learning_rate": 0.0001471036727944966,
"loss": 2.0234,
"step": 1651
},
{
"epoch": 0.76,
"learning_rate": 0.0001465779246478872,
"loss": 2.0117,
"step": 1652
},
{
"epoch": 0.76,
"learning_rate": 0.00014605295628218045,
"loss": 2.0391,
"step": 1653
},
{
"epoch": 0.76,
"learning_rate": 0.0001455287688556527,
"loss": 2.0098,
"step": 1654
},
{
"epoch": 0.76,
"learning_rate": 0.00014500536352485673,
"loss": 2.0176,
"step": 1655
},
{
"epoch": 0.76,
"learning_rate": 0.00014448274144461965,
"loss": 2.0059,
"step": 1656
},
{
"epoch": 0.76,
"learning_rate": 0.00014396090376804112,
"loss": 2.0898,
"step": 1657
},
{
"epoch": 0.76,
"learning_rate": 0.00014343985164648926,
"loss": 1.9746,
"step": 1658
},
{
"epoch": 0.76,
"learning_rate": 0.00014291958622959973,
"loss": 1.9922,
"step": 1659
},
{
"epoch": 0.76,
"learning_rate": 0.00014240010866527176,
"loss": 2.1094,
"step": 1660
},
{
"epoch": 0.76,
"learning_rate": 0.00014188142009966686,
"loss": 1.9785,
"step": 1661
},
{
"epoch": 0.76,
"learning_rate": 0.0001413635216772053,
"loss": 2.041,
"step": 1662
},
{
"epoch": 0.76,
"learning_rate": 0.000140846414540564,
"loss": 2.0,
"step": 1663
},
{
"epoch": 0.76,
"learning_rate": 0.00014033009983067452,
"loss": 1.9727,
"step": 1664
},
{
"epoch": 0.76,
"learning_rate": 0.00013981457868671927,
"loss": 2.0254,
"step": 1665
},
{
"epoch": 0.76,
"learning_rate": 0.0001392998522461305,
"loss": 2.1211,
"step": 1666
},
{
"epoch": 0.76,
"learning_rate": 0.00013878592164458635,
"loss": 1.9902,
"step": 1667
},
{
"epoch": 0.76,
"learning_rate": 0.00013827278801600978,
"loss": 2.082,
"step": 1668
},
{
"epoch": 0.77,
"learning_rate": 0.0001377604524925647,
"loss": 2.1035,
"step": 1669
},
{
"epoch": 0.77,
"learning_rate": 0.00013724891620465424,
"loss": 2.0391,
"step": 1670
},
{
"epoch": 0.77,
"learning_rate": 0.0001367381802809185,
"loss": 1.9316,
"step": 1671
},
{
"epoch": 0.77,
"learning_rate": 0.00013622824584823113,
"loss": 2.0352,
"step": 1672
},
{
"epoch": 0.77,
"learning_rate": 0.00013571911403169795,
"loss": 2.041,
"step": 1673
},
{
"epoch": 0.77,
"learning_rate": 0.0001352107859546533,
"loss": 1.998,
"step": 1674
},
{
"epoch": 0.77,
"learning_rate": 0.00013470326273865886,
"loss": 2.0938,
"step": 1675
},
{
"epoch": 0.77,
"learning_rate": 0.00013419654550349985,
"loss": 2.082,
"step": 1676
},
{
"epoch": 0.77,
"learning_rate": 0.00013369063536718346,
"loss": 2.0254,
"step": 1677
},
{
"epoch": 0.77,
"learning_rate": 0.00013318553344593632,
"loss": 2.0273,
"step": 1678
},
{
"epoch": 0.77,
"learning_rate": 0.00013268124085420136,
"loss": 2.0664,
"step": 1679
},
{
"epoch": 0.77,
"learning_rate": 0.0001321777587046364,
"loss": 2.1055,
"step": 1680
},
{
"epoch": 0.77,
"learning_rate": 0.00013167508810811059,
"loss": 2.0684,
"step": 1681
},
{
"epoch": 0.77,
"learning_rate": 0.0001311732301737029,
"loss": 2.1016,
"step": 1682
},
{
"epoch": 0.77,
"learning_rate": 0.0001306721860086991,
"loss": 2.0488,
"step": 1683
},
{
"epoch": 0.77,
"learning_rate": 0.00013017195671858928,
"loss": 2.0117,
"step": 1684
},
{
"epoch": 0.77,
"learning_rate": 0.0001296725434070661,
"loss": 2.1055,
"step": 1685
},
{
"epoch": 0.77,
"learning_rate": 0.00012917394717602121,
"loss": 2.0762,
"step": 1686
},
{
"epoch": 0.77,
"learning_rate": 0.00012867616912554426,
"loss": 1.9668,
"step": 1687
},
{
"epoch": 0.77,
"learning_rate": 0.00012817921035391882,
"loss": 2.0332,
"step": 1688
},
{
"epoch": 0.77,
"learning_rate": 0.00012768307195762168,
"loss": 2.0527,
"step": 1689
},
{
"epoch": 0.77,
"learning_rate": 0.00012718775503131908,
"loss": 2.0156,
"step": 1690
},
{
"epoch": 0.78,
"learning_rate": 0.0001266932606678646,
"loss": 2.0586,
"step": 1691
},
{
"epoch": 0.78,
"learning_rate": 0.00012619958995829756,
"loss": 2.0508,
"step": 1692
},
{
"epoch": 0.78,
"learning_rate": 0.0001257067439918394,
"loss": 2.0098,
"step": 1693
},
{
"epoch": 0.78,
"learning_rate": 0.00012521472385589234,
"loss": 1.9902,
"step": 1694
},
{
"epoch": 0.78,
"learning_rate": 0.00012472353063603626,
"loss": 1.9902,
"step": 1695
},
{
"epoch": 0.78,
"learning_rate": 0.0001242331654160263,
"loss": 1.9961,
"step": 1696
},
{
"epoch": 0.78,
"learning_rate": 0.0001237436292777914,
"loss": 2.0215,
"step": 1697
},
{
"epoch": 0.78,
"learning_rate": 0.00012325492330143061,
"loss": 2.0391,
"step": 1698
},
{
"epoch": 0.78,
"learning_rate": 0.00012276704856521175,
"loss": 1.9512,
"step": 1699
},
{
"epoch": 0.78,
"learning_rate": 0.00012228000614556816,
"loss": 2.0352,
"step": 1700
},
{
"epoch": 0.78,
"learning_rate": 0.00012179379711709738,
"loss": 2.0879,
"step": 1701
},
{
"epoch": 0.78,
"learning_rate": 0.0001213084225525577,
"loss": 2.0957,
"step": 1702
},
{
"epoch": 0.78,
"learning_rate": 0.00012082388352286627,
"loss": 2.1289,
"step": 1703
},
{
"epoch": 0.78,
"learning_rate": 0.00012034018109709716,
"loss": 2.0957,
"step": 1704
},
{
"epoch": 0.78,
"learning_rate": 0.00011985731634247809,
"loss": 1.9688,
"step": 1705
},
{
"epoch": 0.78,
"learning_rate": 0.00011937529032438904,
"loss": 1.9824,
"step": 1706
},
{
"epoch": 0.78,
"learning_rate": 0.00011889410410635887,
"loss": 1.9727,
"step": 1707
},
{
"epoch": 0.78,
"learning_rate": 0.0001184137587500641,
"loss": 1.9512,
"step": 1708
},
{
"epoch": 0.78,
"learning_rate": 0.00011793425531532564,
"loss": 1.959,
"step": 1709
},
{
"epoch": 0.78,
"learning_rate": 0.00011745559486010671,
"loss": 2.0625,
"step": 1710
},
{
"epoch": 0.78,
"learning_rate": 0.00011697777844051105,
"loss": 2.1172,
"step": 1711
},
{
"epoch": 0.78,
"learning_rate": 0.00011650080711077964,
"loss": 1.9805,
"step": 1712
},
{
"epoch": 0.79,
"learning_rate": 0.00011602468192328936,
"loss": 2.1289,
"step": 1713
},
{
"epoch": 0.79,
"learning_rate": 0.00011554940392854973,
"loss": 2.0586,
"step": 1714
},
{
"epoch": 0.79,
"learning_rate": 0.00011507497417520146,
"loss": 2.0781,
"step": 1715
},
{
"epoch": 0.79,
"learning_rate": 0.00011460139371001339,
"loss": 2.0625,
"step": 1716
},
{
"epoch": 0.79,
"learning_rate": 0.00011412866357788049,
"loss": 2.043,
"step": 1717
},
{
"epoch": 0.79,
"learning_rate": 0.00011365678482182207,
"loss": 1.9375,
"step": 1718
},
{
"epoch": 0.79,
"learning_rate": 0.0001131857584829783,
"loss": 2.0508,
"step": 1719
},
{
"epoch": 0.79,
"learning_rate": 0.0001127155856006093,
"loss": 2.0605,
"step": 1720
},
{
"epoch": 0.79,
"learning_rate": 0.00011224626721209141,
"loss": 1.9258,
"step": 1721
},
{
"epoch": 0.79,
"learning_rate": 0.0001117778043529164,
"loss": 2.1172,
"step": 1722
},
{
"epoch": 0.79,
"learning_rate": 0.0001113101980566879,
"loss": 1.9922,
"step": 1723
},
{
"epoch": 0.79,
"learning_rate": 0.00011084344935511958,
"loss": 1.9961,
"step": 1724
},
{
"epoch": 0.79,
"learning_rate": 0.00011037755927803345,
"loss": 2.0293,
"step": 1725
},
{
"epoch": 0.79,
"learning_rate": 0.00010991252885335651,
"loss": 2.0703,
"step": 1726
},
{
"epoch": 0.79,
"learning_rate": 0.00010944835910711958,
"loss": 2.0703,
"step": 1727
},
{
"epoch": 0.79,
"learning_rate": 0.00010898505106345396,
"loss": 2.084,
"step": 1728
},
{
"epoch": 0.79,
"learning_rate": 0.00010852260574459022,
"loss": 2.0215,
"step": 1729
},
{
"epoch": 0.79,
"learning_rate": 0.00010806102417085512,
"loss": 2.0195,
"step": 1730
},
{
"epoch": 0.79,
"learning_rate": 0.00010760030736066951,
"loss": 2.0293,
"step": 1731
},
{
"epoch": 0.79,
"learning_rate": 0.00010714045633054687,
"loss": 2.0547,
"step": 1732
},
{
"epoch": 0.79,
"learning_rate": 0.00010668147209508971,
"loss": 2.0391,
"step": 1733
},
{
"epoch": 0.8,
"learning_rate": 0.00010622335566698877,
"loss": 2.0664,
"step": 1734
},
{
"epoch": 0.8,
"learning_rate": 0.00010576610805701942,
"loss": 2.0684,
"step": 1735
},
{
"epoch": 0.8,
"learning_rate": 0.00010530973027404073,
"loss": 2.0371,
"step": 1736
},
{
"epoch": 0.8,
"learning_rate": 0.00010485422332499212,
"loss": 1.9727,
"step": 1737
},
{
"epoch": 0.8,
"learning_rate": 0.00010439958821489165,
"loss": 1.9883,
"step": 1738
},
{
"epoch": 0.8,
"learning_rate": 0.00010394582594683428,
"loss": 1.9844,
"step": 1739
},
{
"epoch": 0.8,
"learning_rate": 0.0001034929375219884,
"loss": 1.9707,
"step": 1740
},
{
"epoch": 0.8,
"learning_rate": 0.00010304092393959514,
"loss": 2.0312,
"step": 1741
},
{
"epoch": 0.8,
"learning_rate": 0.00010258978619696468,
"loss": 1.9531,
"step": 1742
},
{
"epoch": 0.8,
"learning_rate": 0.00010213952528947551,
"loss": 2.0781,
"step": 1743
},
{
"epoch": 0.8,
"learning_rate": 0.00010169014221057089,
"loss": 1.9707,
"step": 1744
},
{
"epoch": 0.8,
"learning_rate": 0.00010124163795175734,
"loss": 2.0605,
"step": 1745
},
{
"epoch": 0.8,
"learning_rate": 0.00010079401350260287,
"loss": 1.9844,
"step": 1746
},
{
"epoch": 0.8,
"learning_rate": 0.00010034726985073362,
"loss": 2.0977,
"step": 1747
},
{
"epoch": 0.8,
"learning_rate": 9.9901407981833e-05,
"loss": 2.0625,
"step": 1748
},
{
"epoch": 0.8,
"learning_rate": 9.94564288796384e-05,
"loss": 2.0508,
"step": 1749
},
{
"epoch": 0.8,
"learning_rate": 9.901233352593953e-05,
"loss": 1.9844,
"step": 1750
},
{
"epoch": 0.8,
"learning_rate": 9.856912290057668e-05,
"loss": 2.0137,
"step": 1751
},
{
"epoch": 0.8,
"learning_rate": 9.812679798143748e-05,
"loss": 2.0156,
"step": 1752
},
{
"epoch": 0.8,
"learning_rate": 9.768535974445586e-05,
"loss": 2.0898,
"step": 1753
},
{
"epoch": 0.8,
"learning_rate": 9.724480916360906e-05,
"loss": 1.9375,
"step": 1754
},
{
"epoch": 0.8,
"learning_rate": 9.68051472109162e-05,
"loss": 2.0156,
"step": 1755
},
{
"epoch": 0.81,
"learning_rate": 9.636637485643529e-05,
"loss": 2.1367,
"step": 1756
},
{
"epoch": 0.81,
"learning_rate": 9.592849306826174e-05,
"loss": 2.1133,
"step": 1757
},
{
"epoch": 0.81,
"learning_rate": 9.549150281252633e-05,
"loss": 2.0762,
"step": 1758
},
{
"epoch": 0.81,
"learning_rate": 9.505540505339223e-05,
"loss": 1.9395,
"step": 1759
},
{
"epoch": 0.81,
"learning_rate": 9.4620200753054e-05,
"loss": 2.0332,
"step": 1760
},
{
"epoch": 0.81,
"learning_rate": 9.418589087173441e-05,
"loss": 1.9766,
"step": 1761
},
{
"epoch": 0.81,
"learning_rate": 9.375247636768325e-05,
"loss": 2.0273,
"step": 1762
},
{
"epoch": 0.81,
"learning_rate": 9.331995819717443e-05,
"loss": 2.0547,
"step": 1763
},
{
"epoch": 0.81,
"learning_rate": 9.288833731450419e-05,
"loss": 1.9961,
"step": 1764
},
{
"epoch": 0.81,
"learning_rate": 9.245761467198948e-05,
"loss": 2.043,
"step": 1765
},
{
"epoch": 0.81,
"learning_rate": 9.20277912199648e-05,
"loss": 1.9961,
"step": 1766
},
{
"epoch": 0.81,
"learning_rate": 9.159886790678123e-05,
"loss": 2.1055,
"step": 1767
},
{
"epoch": 0.81,
"learning_rate": 9.11708456788033e-05,
"loss": 2.0527,
"step": 1768
},
{
"epoch": 0.81,
"learning_rate": 9.074372548040793e-05,
"loss": 2.0859,
"step": 1769
},
{
"epoch": 0.81,
"learning_rate": 9.031750825398145e-05,
"loss": 1.9785,
"step": 1770
},
{
"epoch": 0.81,
"learning_rate": 8.98921949399179e-05,
"loss": 1.9922,
"step": 1771
},
{
"epoch": 0.81,
"learning_rate": 8.94677864766173e-05,
"loss": 2.0664,
"step": 1772
},
{
"epoch": 0.81,
"learning_rate": 8.904428380048269e-05,
"loss": 2.0,
"step": 1773
},
{
"epoch": 0.81,
"learning_rate": 8.862168784591929e-05,
"loss": 2.0527,
"step": 1774
},
{
"epoch": 0.81,
"learning_rate": 8.819999954533115e-05,
"loss": 2.1602,
"step": 1775
},
{
"epoch": 0.81,
"learning_rate": 8.777921982911996e-05,
"loss": 2.0215,
"step": 1776
},
{
"epoch": 0.81,
"learning_rate": 8.735934962568253e-05,
"loss": 1.9961,
"step": 1777
},
{
"epoch": 0.82,
"learning_rate": 8.694038986140945e-05,
"loss": 2.0273,
"step": 1778
},
{
"epoch": 0.82,
"learning_rate": 8.652234146068206e-05,
"loss": 1.9844,
"step": 1779
},
{
"epoch": 0.82,
"learning_rate": 8.610520534587086e-05,
"loss": 2.0449,
"step": 1780
},
{
"epoch": 0.82,
"learning_rate": 8.568898243733397e-05,
"loss": 2.0586,
"step": 1781
},
{
"epoch": 0.82,
"learning_rate": 8.527367365341409e-05,
"loss": 2.041,
"step": 1782
},
{
"epoch": 0.82,
"learning_rate": 8.485927991043757e-05,
"loss": 1.8984,
"step": 1783
},
{
"epoch": 0.82,
"learning_rate": 8.444580212271125e-05,
"loss": 2.043,
"step": 1784
},
{
"epoch": 0.82,
"learning_rate": 8.403324120252159e-05,
"loss": 2.0586,
"step": 1785
},
{
"epoch": 0.82,
"learning_rate": 8.362159806013175e-05,
"loss": 2.0547,
"step": 1786
},
{
"epoch": 0.82,
"learning_rate": 8.321087360377988e-05,
"loss": 2.0645,
"step": 1787
},
{
"epoch": 0.82,
"learning_rate": 8.280106873967752e-05,
"loss": 1.9941,
"step": 1788
},
{
"epoch": 0.82,
"learning_rate": 8.239218437200679e-05,
"loss": 2.0254,
"step": 1789
},
{
"epoch": 0.82,
"learning_rate": 8.198422140291939e-05,
"loss": 2.0645,
"step": 1790
},
{
"epoch": 0.82,
"learning_rate": 8.157718073253351e-05,
"loss": 2.0293,
"step": 1791
},
{
"epoch": 0.82,
"learning_rate": 8.117106325893287e-05,
"loss": 2.0469,
"step": 1792
},
{
"epoch": 0.82,
"learning_rate": 8.076586987816404e-05,
"loss": 2.0371,
"step": 1793
},
{
"epoch": 0.82,
"learning_rate": 8.036160148423449e-05,
"loss": 2.1641,
"step": 1794
},
{
"epoch": 0.82,
"learning_rate": 7.995825896911141e-05,
"loss": 1.9414,
"step": 1795
},
{
"epoch": 0.82,
"learning_rate": 7.955584322271853e-05,
"loss": 1.9941,
"step": 1796
},
{
"epoch": 0.82,
"learning_rate": 7.915435513293523e-05,
"loss": 2.125,
"step": 1797
},
{
"epoch": 0.82,
"learning_rate": 7.875379558559387e-05,
"loss": 2.1211,
"step": 1798
},
{
"epoch": 0.82,
"learning_rate": 7.835416546447838e-05,
"loss": 2.0312,
"step": 1799
},
{
"epoch": 0.83,
"learning_rate": 7.795546565132167e-05,
"loss": 1.998,
"step": 1800
},
{
"epoch": 0.83,
"learning_rate": 7.755769702580412e-05,
"loss": 1.9824,
"step": 1801
},
{
"epoch": 0.83,
"learning_rate": 7.716086046555193e-05,
"loss": 1.9941,
"step": 1802
},
{
"epoch": 0.83,
"learning_rate": 7.676495684613432e-05,
"loss": 1.9453,
"step": 1803
},
{
"epoch": 0.83,
"learning_rate": 7.636998704106252e-05,
"loss": 2.0898,
"step": 1804
},
{
"epoch": 0.83,
"learning_rate": 7.597595192178702e-05,
"loss": 2.0332,
"step": 1805
},
{
"epoch": 0.83,
"learning_rate": 7.558285235769646e-05,
"loss": 2.0352,
"step": 1806
},
{
"epoch": 0.83,
"learning_rate": 7.519068921611494e-05,
"loss": 2.0156,
"step": 1807
},
{
"epoch": 0.83,
"learning_rate": 7.479946336230047e-05,
"loss": 2.0488,
"step": 1808
},
{
"epoch": 0.83,
"learning_rate": 7.440917565944349e-05,
"loss": 2.0234,
"step": 1809
},
{
"epoch": 0.83,
"learning_rate": 7.4019826968664e-05,
"loss": 2.0879,
"step": 1810
},
{
"epoch": 0.83,
"learning_rate": 7.363141814901053e-05,
"loss": 2.0332,
"step": 1811
},
{
"epoch": 0.83,
"learning_rate": 7.32439500574577e-05,
"loss": 2.0566,
"step": 1812
},
{
"epoch": 0.83,
"learning_rate": 7.285742354890473e-05,
"loss": 2.0586,
"step": 1813
},
{
"epoch": 0.83,
"learning_rate": 7.247183947617325e-05,
"loss": 2.0273,
"step": 1814
},
{
"epoch": 0.83,
"learning_rate": 7.20871986900053e-05,
"loss": 2.1367,
"step": 1815
},
{
"epoch": 0.83,
"learning_rate": 7.170350203906218e-05,
"loss": 1.998,
"step": 1816
},
{
"epoch": 0.83,
"learning_rate": 7.132075036992158e-05,
"loss": 2.0391,
"step": 1817
},
{
"epoch": 0.83,
"learning_rate": 7.093894452707666e-05,
"loss": 2.043,
"step": 1818
},
{
"epoch": 0.83,
"learning_rate": 7.055808535293334e-05,
"loss": 2.0176,
"step": 1819
},
{
"epoch": 0.83,
"learning_rate": 7.017817368780888e-05,
"loss": 2.0391,
"step": 1820
},
{
"epoch": 0.83,
"learning_rate": 6.979921036993042e-05,
"loss": 2.0332,
"step": 1821
},
{
"epoch": 0.84,
"learning_rate": 6.942119623543202e-05,
"loss": 2.0762,
"step": 1822
},
{
"epoch": 0.84,
"learning_rate": 6.904413211835414e-05,
"loss": 2.0273,
"step": 1823
},
{
"epoch": 0.84,
"learning_rate": 6.866801885064056e-05,
"loss": 2.0977,
"step": 1824
},
{
"epoch": 0.84,
"learning_rate": 6.829285726213769e-05,
"loss": 1.9902,
"step": 1825
},
{
"epoch": 0.84,
"learning_rate": 6.79186481805918e-05,
"loss": 1.9961,
"step": 1826
},
{
"epoch": 0.84,
"learning_rate": 6.754539243164754e-05,
"loss": 2.0859,
"step": 1827
},
{
"epoch": 0.84,
"learning_rate": 6.717309083884654e-05,
"loss": 2.0137,
"step": 1828
},
{
"epoch": 0.84,
"learning_rate": 6.680174422362468e-05,
"loss": 1.9961,
"step": 1829
},
{
"epoch": 0.84,
"learning_rate": 6.643135340531136e-05,
"loss": 2.0391,
"step": 1830
},
{
"epoch": 0.84,
"learning_rate": 6.606191920112664e-05,
"loss": 2.002,
"step": 1831
},
{
"epoch": 0.84,
"learning_rate": 6.569344242618036e-05,
"loss": 2.0312,
"step": 1832
},
{
"epoch": 0.84,
"learning_rate": 6.532592389346958e-05,
"loss": 2.082,
"step": 1833
},
{
"epoch": 0.84,
"learning_rate": 6.495936441387713e-05,
"loss": 2.0742,
"step": 1834
},
{
"epoch": 0.84,
"learning_rate": 6.459376479617013e-05,
"loss": 2.0156,
"step": 1835
},
{
"epoch": 0.84,
"learning_rate": 6.422912584699752e-05,
"loss": 1.9668,
"step": 1836
},
{
"epoch": 0.84,
"learning_rate": 6.386544837088904e-05,
"loss": 2.082,
"step": 1837
},
{
"epoch": 0.84,
"learning_rate": 6.350273317025251e-05,
"loss": 2.041,
"step": 1838
},
{
"epoch": 0.84,
"learning_rate": 6.314098104537324e-05,
"loss": 2.1113,
"step": 1839
},
{
"epoch": 0.84,
"learning_rate": 6.278019279441122e-05,
"loss": 1.9355,
"step": 1840
},
{
"epoch": 0.84,
"learning_rate": 6.242036921339972e-05,
"loss": 2.1016,
"step": 1841
},
{
"epoch": 0.84,
"learning_rate": 6.206151109624402e-05,
"loss": 2.084,
"step": 1842
},
{
"epoch": 0.85,
"learning_rate": 6.170361923471868e-05,
"loss": 2.0273,
"step": 1843
},
{
"epoch": 0.85,
"learning_rate": 6.134669441846691e-05,
"loss": 2.0391,
"step": 1844
},
{
"epoch": 0.85,
"learning_rate": 6.099073743499772e-05,
"loss": 2.0547,
"step": 1845
},
{
"epoch": 0.85,
"learning_rate": 6.063574906968511e-05,
"loss": 2.0996,
"step": 1846
},
{
"epoch": 0.85,
"learning_rate": 6.028173010576582e-05,
"loss": 2.0156,
"step": 1847
},
{
"epoch": 0.85,
"learning_rate": 5.9928681324337544e-05,
"loss": 1.998,
"step": 1848
},
{
"epoch": 0.85,
"learning_rate": 5.957660350435773e-05,
"loss": 1.9453,
"step": 1849
},
{
"epoch": 0.85,
"learning_rate": 5.922549742264122e-05,
"loss": 2.0859,
"step": 1850
},
{
"epoch": 0.85,
"learning_rate": 5.8875363853859166e-05,
"loss": 2.0137,
"step": 1851
},
{
"epoch": 0.85,
"learning_rate": 5.852620357053651e-05,
"loss": 2.0898,
"step": 1852
},
{
"epoch": 0.85,
"learning_rate": 5.8178017343051336e-05,
"loss": 1.9668,
"step": 1853
},
{
"epoch": 0.85,
"learning_rate": 5.783080593963219e-05,
"loss": 2.0566,
"step": 1854
},
{
"epoch": 0.85,
"learning_rate": 5.748457012635683e-05,
"loss": 1.9199,
"step": 1855
},
{
"epoch": 0.85,
"learning_rate": 5.713931066715078e-05,
"loss": 2.0293,
"step": 1856
},
{
"epoch": 0.85,
"learning_rate": 5.679502832378497e-05,
"loss": 2.0859,
"step": 1857
},
{
"epoch": 0.85,
"learning_rate": 5.645172385587482e-05,
"loss": 2.0273,
"step": 1858
},
{
"epoch": 0.85,
"learning_rate": 5.6109398020877834e-05,
"loss": 2.0332,
"step": 1859
},
{
"epoch": 0.85,
"learning_rate": 5.576805157409265e-05,
"loss": 1.9277,
"step": 1860
},
{
"epoch": 0.85,
"learning_rate": 5.542768526865677e-05,
"loss": 1.9648,
"step": 1861
},
{
"epoch": 0.85,
"learning_rate": 5.508829985554509e-05,
"loss": 2.0,
"step": 1862
},
{
"epoch": 0.85,
"learning_rate": 5.474989608356856e-05,
"loss": 2.0723,
"step": 1863
},
{
"epoch": 0.85,
"learning_rate": 5.441247469937194e-05,
"loss": 1.9512,
"step": 1864
},
{
"epoch": 0.86,
"learning_rate": 5.407603644743286e-05,
"loss": 2.0156,
"step": 1865
},
{
"epoch": 0.86,
"learning_rate": 5.374058207005944e-05,
"loss": 1.9961,
"step": 1866
},
{
"epoch": 0.86,
"learning_rate": 5.3406112307389066e-05,
"loss": 2.0156,
"step": 1867
},
{
"epoch": 0.86,
"learning_rate": 5.3072627897386926e-05,
"loss": 1.9766,
"step": 1868
},
{
"epoch": 0.86,
"learning_rate": 5.27401295758439e-05,
"loss": 2.1133,
"step": 1869
},
{
"epoch": 0.86,
"learning_rate": 5.2408618076375315e-05,
"loss": 2.0156,
"step": 1870
},
{
"epoch": 0.86,
"learning_rate": 5.207809413041914e-05,
"loss": 1.9375,
"step": 1871
},
{
"epoch": 0.86,
"learning_rate": 5.174855846723459e-05,
"loss": 2.0449,
"step": 1872
},
{
"epoch": 0.86,
"learning_rate": 5.1420011813900104e-05,
"loss": 1.9941,
"step": 1873
},
{
"epoch": 0.86,
"learning_rate": 5.109245489531211e-05,
"loss": 2.0098,
"step": 1874
},
{
"epoch": 0.86,
"learning_rate": 5.0765888434183446e-05,
"loss": 2.0176,
"step": 1875
},
{
"epoch": 0.86,
"learning_rate": 5.0440313151041364e-05,
"loss": 2.0352,
"step": 1876
},
{
"epoch": 0.86,
"learning_rate": 5.011572976422657e-05,
"loss": 1.9941,
"step": 1877
},
{
"epoch": 0.86,
"learning_rate": 4.9792138989890825e-05,
"loss": 1.9961,
"step": 1878
},
{
"epoch": 0.86,
"learning_rate": 4.9469541541996234e-05,
"loss": 1.9531,
"step": 1879
},
{
"epoch": 0.86,
"learning_rate": 4.914793813231305e-05,
"loss": 2.0918,
"step": 1880
},
{
"epoch": 0.86,
"learning_rate": 4.882732947041818e-05,
"loss": 2.0449,
"step": 1881
},
{
"epoch": 0.86,
"learning_rate": 4.850771626369416e-05,
"loss": 1.9082,
"step": 1882
},
{
"epoch": 0.86,
"learning_rate": 4.818909921732662e-05,
"loss": 2.0645,
"step": 1883
},
{
"epoch": 0.86,
"learning_rate": 4.787147903430383e-05,
"loss": 2.0078,
"step": 1884
},
{
"epoch": 0.86,
"learning_rate": 4.755485641541424e-05,
"loss": 2.1602,
"step": 1885
},
{
"epoch": 0.86,
"learning_rate": 4.723923205924557e-05,
"loss": 2.0625,
"step": 1886
},
{
"epoch": 0.87,
"learning_rate": 4.6924606662182736e-05,
"loss": 2.0645,
"step": 1887
},
{
"epoch": 0.87,
"learning_rate": 4.6610980918406596e-05,
"loss": 2.0195,
"step": 1888
},
{
"epoch": 0.87,
"learning_rate": 4.629835551989276e-05,
"loss": 2.0547,
"step": 1889
},
{
"epoch": 0.87,
"learning_rate": 4.5986731156409224e-05,
"loss": 1.957,
"step": 1890
},
{
"epoch": 0.87,
"learning_rate": 4.567610851551568e-05,
"loss": 1.9941,
"step": 1891
},
{
"epoch": 0.87,
"learning_rate": 4.536648828256146e-05,
"loss": 2.1133,
"step": 1892
},
{
"epoch": 0.87,
"learning_rate": 4.505787114068433e-05,
"loss": 2.1445,
"step": 1893
},
{
"epoch": 0.87,
"learning_rate": 4.4750257770808764e-05,
"loss": 2.0078,
"step": 1894
},
{
"epoch": 0.87,
"learning_rate": 4.444364885164448e-05,
"loss": 1.9883,
"step": 1895
},
{
"epoch": 0.87,
"learning_rate": 4.413804505968533e-05,
"loss": 1.9551,
"step": 1896
},
{
"epoch": 0.87,
"learning_rate": 4.3833447069206944e-05,
"loss": 2.0664,
"step": 1897
},
{
"epoch": 0.87,
"learning_rate": 4.352985555226635e-05,
"loss": 2.084,
"step": 1898
},
{
"epoch": 0.87,
"learning_rate": 4.322727117869951e-05,
"loss": 2.0879,
"step": 1899
},
{
"epoch": 0.87,
"learning_rate": 4.29256946161205e-05,
"loss": 2.0098,
"step": 1900
},
{
"epoch": 0.87,
"learning_rate": 4.262512652991968e-05,
"loss": 1.9707,
"step": 1901
},
{
"epoch": 0.87,
"learning_rate": 4.2325567583262113e-05,
"loss": 2.1445,
"step": 1902
},
{
"epoch": 0.87,
"learning_rate": 4.2027018437086895e-05,
"loss": 2.0918,
"step": 1903
},
{
"epoch": 0.87,
"learning_rate": 4.172947975010449e-05,
"loss": 1.9668,
"step": 1904
},
{
"epoch": 0.87,
"learning_rate": 4.143295217879645e-05,
"loss": 2.1172,
"step": 1905
},
{
"epoch": 0.87,
"learning_rate": 4.113743637741296e-05,
"loss": 2.1406,
"step": 1906
},
{
"epoch": 0.87,
"learning_rate": 4.084293299797226e-05,
"loss": 2.0469,
"step": 1907
},
{
"epoch": 0.87,
"learning_rate": 4.054944269025862e-05,
"loss": 2.1172,
"step": 1908
},
{
"epoch": 0.88,
"learning_rate": 4.025696610182095e-05,
"loss": 2.0703,
"step": 1909
},
{
"epoch": 0.88,
"learning_rate": 3.996550387797187e-05,
"loss": 2.0117,
"step": 1910
},
{
"epoch": 0.88,
"learning_rate": 3.9675056661785556e-05,
"loss": 1.9414,
"step": 1911
},
{
"epoch": 0.88,
"learning_rate": 3.9385625094097154e-05,
"loss": 2.0391,
"step": 1912
},
{
"epoch": 0.88,
"learning_rate": 3.909720981350034e-05,
"loss": 2.0977,
"step": 1913
},
{
"epoch": 0.88,
"learning_rate": 3.880981145634704e-05,
"loss": 2.0742,
"step": 1914
},
{
"epoch": 0.88,
"learning_rate": 3.852343065674507e-05,
"loss": 2.0176,
"step": 1915
},
{
"epoch": 0.88,
"learning_rate": 3.8238068046557276e-05,
"loss": 2.0664,
"step": 1916
},
{
"epoch": 0.88,
"learning_rate": 3.795372425540006e-05,
"loss": 2.1445,
"step": 1917
},
{
"epoch": 0.88,
"learning_rate": 3.76703999106418e-05,
"loss": 1.9648,
"step": 1918
},
{
"epoch": 0.88,
"learning_rate": 3.7388095637401754e-05,
"loss": 2.0938,
"step": 1919
},
{
"epoch": 0.88,
"learning_rate": 3.7106812058548376e-05,
"loss": 1.9766,
"step": 1920
},
{
"epoch": 0.88,
"learning_rate": 3.682654979469807e-05,
"loss": 2.0664,
"step": 1921
},
{
"epoch": 0.88,
"learning_rate": 3.654730946421403e-05,
"loss": 2.0352,
"step": 1922
},
{
"epoch": 0.88,
"learning_rate": 3.6269091683204466e-05,
"loss": 2.0605,
"step": 1923
},
{
"epoch": 0.88,
"learning_rate": 3.5991897065521693e-05,
"loss": 2.0527,
"step": 1924
},
{
"epoch": 0.88,
"learning_rate": 3.571572622276026e-05,
"loss": 2.1152,
"step": 1925
},
{
"epoch": 0.88,
"learning_rate": 3.544057976425619e-05,
"loss": 2.0039,
"step": 1926
},
{
"epoch": 0.88,
"learning_rate": 3.5166458297085146e-05,
"loss": 1.9824,
"step": 1927
},
{
"epoch": 0.88,
"learning_rate": 3.489336242606111e-05,
"loss": 2.0195,
"step": 1928
},
{
"epoch": 0.88,
"learning_rate": 3.462129275373577e-05,
"loss": 2.0293,
"step": 1929
},
{
"epoch": 0.88,
"learning_rate": 3.4350249880395924e-05,
"loss": 2.1465,
"step": 1930
},
{
"epoch": 0.89,
"learning_rate": 3.408023440406355e-05,
"loss": 1.9766,
"step": 1931
},
{
"epoch": 0.89,
"learning_rate": 3.381124692049331e-05,
"loss": 2.0254,
"step": 1932
},
{
"epoch": 0.89,
"learning_rate": 3.354328802317197e-05,
"loss": 2.0469,
"step": 1933
},
{
"epoch": 0.89,
"learning_rate": 3.327635830331677e-05,
"loss": 1.9902,
"step": 1934
},
{
"epoch": 0.89,
"learning_rate": 3.3010458349874206e-05,
"loss": 1.9609,
"step": 1935
},
{
"epoch": 0.89,
"learning_rate": 3.2745588749518775e-05,
"loss": 2.0039,
"step": 1936
},
{
"epoch": 0.89,
"learning_rate": 3.248175008665161e-05,
"loss": 2.0664,
"step": 1937
},
{
"epoch": 0.89,
"learning_rate": 3.221894294339911e-05,
"loss": 2.0605,
"step": 1938
},
{
"epoch": 0.89,
"learning_rate": 3.1957167899611836e-05,
"loss": 2.0645,
"step": 1939
},
{
"epoch": 0.89,
"learning_rate": 3.169642553286334e-05,
"loss": 1.9941,
"step": 1940
},
{
"epoch": 0.89,
"learning_rate": 3.143671641844831e-05,
"loss": 1.916,
"step": 1941
},
{
"epoch": 0.89,
"learning_rate": 3.117804112938205e-05,
"loss": 1.9844,
"step": 1942
},
{
"epoch": 0.89,
"learning_rate": 3.092040023639869e-05,
"loss": 2.0449,
"step": 1943
},
{
"epoch": 0.89,
"learning_rate": 3.066379430795002e-05,
"loss": 1.9941,
"step": 1944
},
{
"epoch": 0.89,
"learning_rate": 3.040822391020459e-05,
"loss": 2.0176,
"step": 1945
},
{
"epoch": 0.89,
"learning_rate": 3.0153689607045842e-05,
"loss": 2.0664,
"step": 1946
},
{
"epoch": 0.89,
"learning_rate": 2.9900191960071545e-05,
"loss": 1.9102,
"step": 1947
},
{
"epoch": 0.89,
"learning_rate": 2.9647731528591848e-05,
"loss": 2.0195,
"step": 1948
},
{
"epoch": 0.89,
"learning_rate": 2.9396308869628795e-05,
"loss": 1.9629,
"step": 1949
},
{
"epoch": 0.89,
"learning_rate": 2.914592453791448e-05,
"loss": 1.9785,
"step": 1950
},
{
"epoch": 0.89,
"learning_rate": 2.8896579085889994e-05,
"loss": 2.0898,
"step": 1951
},
{
"epoch": 0.9,
"learning_rate": 2.86482730637046e-05,
"loss": 2.0254,
"step": 1952
},
{
"epoch": 0.9,
"learning_rate": 2.840100701921383e-05,
"loss": 1.9453,
"step": 1953
},
{
"epoch": 0.9,
"learning_rate": 2.8154781497978898e-05,
"loss": 2.0801,
"step": 1954
},
{
"epoch": 0.9,
"learning_rate": 2.7909597043265013e-05,
"loss": 2.0137,
"step": 1955
},
{
"epoch": 0.9,
"learning_rate": 2.7665454196040662e-05,
"loss": 2.0117,
"step": 1956
},
{
"epoch": 0.9,
"learning_rate": 2.7422353494975905e-05,
"loss": 2.0059,
"step": 1957
},
{
"epoch": 0.9,
"learning_rate": 2.7180295476441573e-05,
"loss": 2.1387,
"step": 1958
},
{
"epoch": 0.9,
"learning_rate": 2.6939280674508016e-05,
"loss": 2.0938,
"step": 1959
},
{
"epoch": 0.9,
"learning_rate": 2.669930962094358e-05,
"loss": 2.0488,
"step": 1960
},
{
"epoch": 0.9,
"learning_rate": 2.6460382845214126e-05,
"loss": 2.0527,
"step": 1961
},
{
"epoch": 0.9,
"learning_rate": 2.6222500874481025e-05,
"loss": 2.0508,
"step": 1962
},
{
"epoch": 0.9,
"learning_rate": 2.5985664233600827e-05,
"loss": 2.0469,
"step": 1963
},
{
"epoch": 0.9,
"learning_rate": 2.574987344512336e-05,
"loss": 2.0371,
"step": 1964
},
{
"epoch": 0.9,
"learning_rate": 2.5515129029290984e-05,
"loss": 2.0215,
"step": 1965
},
{
"epoch": 0.9,
"learning_rate": 2.5281431504037556e-05,
"loss": 2.0312,
"step": 1966
},
{
"epoch": 0.9,
"learning_rate": 2.504878138498684e-05,
"loss": 1.9355,
"step": 1967
},
{
"epoch": 0.9,
"learning_rate": 2.48171791854519e-05,
"loss": 2.041,
"step": 1968
},
{
"epoch": 0.9,
"learning_rate": 2.4586625416433473e-05,
"loss": 2.0098,
"step": 1969
},
{
"epoch": 0.9,
"learning_rate": 2.435712058661921e-05,
"loss": 2.0,
"step": 1970
},
{
"epoch": 0.9,
"learning_rate": 2.4128665202382327e-05,
"loss": 2.0156,
"step": 1971
},
{
"epoch": 0.9,
"learning_rate": 2.3901259767780515e-05,
"loss": 1.9434,
"step": 1972
},
{
"epoch": 0.9,
"learning_rate": 2.367490478455514e-05,
"loss": 2.0312,
"step": 1973
},
{
"epoch": 0.91,
"learning_rate": 2.3449600752129597e-05,
"loss": 2.0859,
"step": 1974
},
{
"epoch": 0.91,
"learning_rate": 2.3225348167608685e-05,
"loss": 2.1191,
"step": 1975
},
{
"epoch": 0.91,
"learning_rate": 2.3002147525777118e-05,
"loss": 2.0781,
"step": 1976
},
{
"epoch": 0.91,
"learning_rate": 2.2779999319098856e-05,
"loss": 2.0957,
"step": 1977
},
{
"epoch": 0.91,
"learning_rate": 2.255890403771571e-05,
"loss": 2.0176,
"step": 1978
},
{
"epoch": 0.91,
"learning_rate": 2.233886216944614e-05,
"loss": 1.9512,
"step": 1979
},
{
"epoch": 0.91,
"learning_rate": 2.211987419978484e-05,
"loss": 2.0059,
"step": 1980
},
{
"epoch": 0.91,
"learning_rate": 2.1901940611900705e-05,
"loss": 2.0,
"step": 1981
},
{
"epoch": 0.91,
"learning_rate": 2.168506188663666e-05,
"loss": 2.0273,
"step": 1982
},
{
"epoch": 0.91,
"learning_rate": 2.1469238502507925e-05,
"loss": 2.0938,
"step": 1983
},
{
"epoch": 0.91,
"learning_rate": 2.125447093570154e-05,
"loss": 1.9648,
"step": 1984
},
{
"epoch": 0.91,
"learning_rate": 2.1040759660074793e-05,
"loss": 2.0137,
"step": 1985
},
{
"epoch": 0.91,
"learning_rate": 2.0828105147154273e-05,
"loss": 2.0195,
"step": 1986
},
{
"epoch": 0.91,
"learning_rate": 2.061650786613545e-05,
"loss": 2.0137,
"step": 1987
},
{
"epoch": 0.91,
"learning_rate": 2.040596828388058e-05,
"loss": 2.0664,
"step": 1988
},
{
"epoch": 0.91,
"learning_rate": 2.019648686491865e-05,
"loss": 2.0781,
"step": 1989
},
{
"epoch": 0.91,
"learning_rate": 1.9988064071443767e-05,
"loss": 1.9688,
"step": 1990
},
{
"epoch": 0.91,
"learning_rate": 1.9780700363314253e-05,
"loss": 2.0723,
"step": 1991
},
{
"epoch": 0.91,
"learning_rate": 1.957439619805196e-05,
"loss": 1.9629,
"step": 1992
},
{
"epoch": 0.91,
"learning_rate": 1.9369152030840554e-05,
"loss": 2.0781,
"step": 1993
},
{
"epoch": 0.91,
"learning_rate": 1.916496831452552e-05,
"loss": 2.0469,
"step": 1994
},
{
"epoch": 0.91,
"learning_rate": 1.8961845499611998e-05,
"loss": 2.082,
"step": 1995
},
{
"epoch": 0.92,
"learning_rate": 1.8759784034264925e-05,
"loss": 2.0684,
"step": 1996
},
{
"epoch": 0.92,
"learning_rate": 1.855878436430708e-05,
"loss": 1.9746,
"step": 1997
},
{
"epoch": 0.92,
"learning_rate": 1.835884693321871e-05,
"loss": 1.9922,
"step": 1998
},
{
"epoch": 0.92,
"learning_rate": 1.8159972182136386e-05,
"loss": 2.0117,
"step": 1999
},
{
"epoch": 0.92,
"learning_rate": 1.7962160549851945e-05,
"loss": 2.0039,
"step": 2000
},
{
"epoch": 0.92,
"learning_rate": 1.776541247281177e-05,
"loss": 1.9648,
"step": 2001
},
{
"epoch": 0.92,
"learning_rate": 1.7569728385115224e-05,
"loss": 1.9316,
"step": 2002
},
{
"epoch": 0.92,
"learning_rate": 1.7375108718514665e-05,
"loss": 2.0234,
"step": 2003
},
{
"epoch": 0.92,
"learning_rate": 1.7181553902413438e-05,
"loss": 2.0156,
"step": 2004
},
{
"epoch": 0.92,
"learning_rate": 1.698906436386577e-05,
"loss": 1.998,
"step": 2005
},
{
"epoch": 0.92,
"learning_rate": 1.679764052757532e-05,
"loss": 1.998,
"step": 2006
},
{
"epoch": 0.92,
"learning_rate": 1.6607282815894464e-05,
"loss": 2.0,
"step": 2007
},
{
"epoch": 0.92,
"learning_rate": 1.6417991648823405e-05,
"loss": 2.043,
"step": 2008
},
{
"epoch": 0.92,
"learning_rate": 1.6229767444008835e-05,
"loss": 1.9316,
"step": 2009
},
{
"epoch": 0.92,
"learning_rate": 1.604261061674378e-05,
"loss": 2.0215,
"step": 2010
},
{
"epoch": 0.92,
"learning_rate": 1.5856521579965865e-05,
"loss": 2.041,
"step": 2011
},
{
"epoch": 0.92,
"learning_rate": 1.5671500744256938e-05,
"loss": 1.9023,
"step": 2012
},
{
"epoch": 0.92,
"learning_rate": 1.5487548517841953e-05,
"loss": 2.0977,
"step": 2013
},
{
"epoch": 0.92,
"learning_rate": 1.530466530658814e-05,
"loss": 1.9902,
"step": 2014
},
{
"epoch": 0.92,
"learning_rate": 1.5122851514004054e-05,
"loss": 1.9922,
"step": 2015
},
{
"epoch": 0.92,
"learning_rate": 1.4942107541238703e-05,
"loss": 2.0137,
"step": 2016
},
{
"epoch": 0.92,
"learning_rate": 1.4762433787080809e-05,
"loss": 2.041,
"step": 2017
},
{
"epoch": 0.93,
"learning_rate": 1.4583830647957541e-05,
"loss": 2.1211,
"step": 2018
},
{
"epoch": 0.93,
"learning_rate": 1.4406298517934068e-05,
"loss": 2.0918,
"step": 2019
},
{
"epoch": 0.93,
"learning_rate": 1.4229837788712562e-05,
"loss": 2.0391,
"step": 2020
},
{
"epoch": 0.93,
"learning_rate": 1.4054448849631085e-05,
"loss": 2.0078,
"step": 2021
},
{
"epoch": 0.93,
"learning_rate": 1.3880132087663145e-05,
"loss": 2.0293,
"step": 2022
},
{
"epoch": 0.93,
"learning_rate": 1.3706887887416419e-05,
"loss": 1.9629,
"step": 2023
},
{
"epoch": 0.93,
"learning_rate": 1.3534716631132316e-05,
"loss": 2.0117,
"step": 2024
},
{
"epoch": 0.93,
"learning_rate": 1.3363618698684853e-05,
"loss": 2.0293,
"step": 2025
},
{
"epoch": 0.93,
"learning_rate": 1.3193594467579728e-05,
"loss": 2.0078,
"step": 2026
},
{
"epoch": 0.93,
"learning_rate": 1.3024644312954026e-05,
"loss": 2.0117,
"step": 2027
},
{
"epoch": 0.93,
"learning_rate": 1.2856768607574564e-05,
"loss": 1.9492,
"step": 2028
},
{
"epoch": 0.93,
"learning_rate": 1.2689967721837947e-05,
"loss": 2.0078,
"step": 2029
},
{
"epoch": 0.93,
"learning_rate": 1.2524242023769006e-05,
"loss": 1.9824,
"step": 2030
},
{
"epoch": 0.93,
"learning_rate": 1.2359591879020526e-05,
"loss": 2.0078,
"step": 2031
},
{
"epoch": 0.93,
"learning_rate": 1.2196017650872081e-05,
"loss": 1.9688,
"step": 2032
},
{
"epoch": 0.93,
"learning_rate": 1.2033519700229367e-05,
"loss": 2.0645,
"step": 2033
},
{
"epoch": 0.93,
"learning_rate": 1.1872098385623586e-05,
"loss": 2.0273,
"step": 2034
},
{
"epoch": 0.93,
"learning_rate": 1.1711754063210289e-05,
"loss": 2.0,
"step": 2035
},
{
"epoch": 0.93,
"learning_rate": 1.155248708676887e-05,
"loss": 1.9766,
"step": 2036
},
{
"epoch": 0.93,
"learning_rate": 1.1394297807701737e-05,
"loss": 2.0898,
"step": 2037
},
{
"epoch": 0.93,
"learning_rate": 1.1237186575033254e-05,
"loss": 1.998,
"step": 2038
},
{
"epoch": 0.93,
"learning_rate": 1.1081153735409522e-05,
"loss": 2.0332,
"step": 2039
},
{
"epoch": 0.94,
"learning_rate": 1.0926199633097156e-05,
"loss": 1.9883,
"step": 2040
},
{
"epoch": 0.94,
"learning_rate": 1.0772324609982787e-05,
"loss": 2.002,
"step": 2041
},
{
"epoch": 0.94,
"learning_rate": 1.0619529005571893e-05,
"loss": 1.9922,
"step": 2042
},
{
"epoch": 0.94,
"learning_rate": 1.0467813156988748e-05,
"loss": 1.9629,
"step": 2043
},
{
"epoch": 0.94,
"learning_rate": 1.0317177398975031e-05,
"loss": 2.0508,
"step": 2044
},
{
"epoch": 0.94,
"learning_rate": 1.0167622063889326e-05,
"loss": 2.0371,
"step": 2045
},
{
"epoch": 0.94,
"learning_rate": 1.0019147481706625e-05,
"loss": 2.0195,
"step": 2046
},
{
"epoch": 0.94,
"learning_rate": 9.871753980017051e-06,
"loss": 2.0234,
"step": 2047
},
{
"epoch": 0.94,
"learning_rate": 9.725441884025855e-06,
"loss": 1.9863,
"step": 2048
},
{
"epoch": 0.94,
"learning_rate": 9.580211516551862e-06,
"loss": 2.1211,
"step": 2049
},
{
"epoch": 0.94,
"learning_rate": 9.436063198027589e-06,
"loss": 1.9766,
"step": 2050
},
{
"epoch": 0.94,
"learning_rate": 9.292997246497959e-06,
"loss": 1.9551,
"step": 2051
},
{
"epoch": 0.94,
"learning_rate": 9.151013977619693e-06,
"loss": 2.1172,
"step": 2052
},
{
"epoch": 0.94,
"learning_rate": 9.010113704661038e-06,
"loss": 2.0117,
"step": 2053
},
{
"epoch": 0.94,
"learning_rate": 8.870296738500316e-06,
"loss": 2.0859,
"step": 2054
},
{
"epoch": 0.94,
"learning_rate": 8.731563387626095e-06,
"loss": 2.0391,
"step": 2055
|
|
},
|
|
{
|
|
"epoch": 0.94,
|
|
"learning_rate": 8.59391395813569e-06,
|
|
"loss": 1.9883,
|
|
"step": 2056
|
|
},
|
|
{
|
|
"epoch": 0.94,
|
|
"learning_rate": 8.457348753735328e-06,
|
|
"loss": 2.0566,
|
|
"step": 2057
|
|
},
|
|
{
|
|
"epoch": 0.94,
|
|
"learning_rate": 8.321868075738593e-06,
|
|
"loss": 1.9922,
|
|
"step": 2058
|
|
},
|
|
{
|
|
"epoch": 0.94,
|
|
"learning_rate": 8.187472223066371e-06,
|
|
"loss": 2.082,
|
|
"step": 2059
|
|
},
|
|
{
|
|
"epoch": 0.94,
|
|
"learning_rate": 8.054161492246136e-06,
|
|
"loss": 2.0195,
|
|
"step": 2060
|
|
},
|
|
{
|
|
"epoch": 0.94,
|
|
"learning_rate": 7.921936177411049e-06,
|
|
"loss": 2.0508,
|
|
"step": 2061
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 7.790796570299463e-06,
|
|
"loss": 1.9766,
|
|
"step": 2062
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 7.660742960254207e-06,
|
|
"loss": 2.0469,
|
|
"step": 2063
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 7.531775634222138e-06,
|
|
"loss": 1.9531,
|
|
"step": 2064
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 7.403894876753192e-06,
|
|
"loss": 2.0039,
|
|
"step": 2065
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 7.277100970000061e-06,
|
|
"loss": 2.0527,
|
|
"step": 2066
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 7.151394193717408e-06,
|
|
"loss": 1.9551,
|
|
"step": 2067
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 7.026774825261151e-06,
|
|
"loss": 2.002,
|
|
"step": 2068
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 6.903243139588233e-06,
|
|
"loss": 1.9551,
|
|
"step": 2069
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 6.780799409255522e-06,
|
|
"loss": 1.9785,
|
|
"step": 2070
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 6.659443904419637e-06,
|
|
"loss": 2.1602,
|
|
"step": 2071
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 6.539176892836008e-06,
|
|
"loss": 2.0391,
|
|
"step": 2072
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 6.4199986398585375e-06,
|
|
"loss": 2.0605,
|
|
"step": 2073
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 6.3019094084388884e-06,
|
|
"loss": 2.0352,
|
|
"step": 2074
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 6.18490945912592e-06,
|
|
"loss": 2.0,
|
|
"step": 2075
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 6.068999050065249e-06,
|
|
"loss": 2.041,
|
|
"step": 2076
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 5.9541784369983586e-06,
|
|
"loss": 2.0684,
|
|
"step": 2077
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 5.840447873262433e-06,
|
|
"loss": 2.0781,
|
|
"step": 2078
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 5.727807609789471e-06,
|
|
"loss": 2.0039,
|
|
"step": 2079
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 5.616257895105892e-06,
|
|
"loss": 1.9902,
|
|
"step": 2080
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 5.505798975331933e-06,
|
|
"loss": 2.0723,
|
|
"step": 2081
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 5.396431094181198e-06,
|
|
"loss": 2.0684,
|
|
"step": 2082
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 5.288154492960107e-06,
|
|
"loss": 2.082,
|
|
"step": 2083
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 5.1809694105671155e-06,
|
|
"loss": 2.0645,
|
|
"step": 2084
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 5.074876083492441e-06,
|
|
"loss": 2.0039,
|
|
"step": 2085
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.96987474581767e-06,
|
|
"loss": 2.0508,
|
|
"step": 2086
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.865965629214819e-06,
|
|
"loss": 1.9219,
|
|
"step": 2087
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.763148962946218e-06,
|
|
"loss": 2.0234,
|
|
"step": 2088
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.661424973863681e-06,
|
|
"loss": 1.9219,
|
|
"step": 2089
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.560793886408398e-06,
|
|
"loss": 2.0254,
|
|
"step": 2090
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.461255922609986e-06,
|
|
"loss": 2.1016,
|
|
"step": 2091
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.362811302086267e-06,
|
|
"loss": 2.0664,
|
|
"step": 2092
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.265460242042885e-06,
|
|
"loss": 1.9824,
|
|
"step": 2093
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.169202957272522e-06,
|
|
"loss": 2.0488,
|
|
"step": 2094
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 4.074039660154738e-06,
|
|
"loss": 2.0352,
|
|
"step": 2095
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.9799705606551325e-06,
|
|
"loss": 2.1035,
|
|
"step": 2096
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.886995866325294e-06,
|
|
"loss": 1.9941,
|
|
"step": 2097
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.795115782302072e-06,
|
|
"loss": 2.0957,
|
|
"step": 2098
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.704330511307197e-06,
|
|
"loss": 1.9336,
|
|
"step": 2099
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.614640253646828e-06,
|
|
"loss": 1.9941,
|
|
"step": 2100
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.5260452072110594e-06,
|
|
"loss": 2.125,
|
|
"step": 2101
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.4385455674737498e-06,
|
|
"loss": 2.0059,
|
|
"step": 2102
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.3521415274915256e-06,
|
|
"loss": 2.082,
|
|
"step": 2103
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 3.2668332779041133e-06,
|
|
"loss": 2.0,
|
|
"step": 2104
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 3.1826210069332838e-06,
|
|
"loss": 1.9844,
|
|
"step": 2105
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 3.0995049003826324e-06,
|
|
"loss": 1.9941,
|
|
"step": 2106
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 3.017485141637355e-06,
|
|
"loss": 1.9863,
|
|
"step": 2107
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.9365619116636376e-06,
|
|
"loss": 2.043,
|
|
"step": 2108
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.856735389008269e-06,
|
|
"loss": 1.9785,
|
|
"step": 2109
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.778005749798307e-06,
|
|
"loss": 2.0254,
|
|
"step": 2110
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.700373167740744e-06,
|
|
"loss": 1.9805,
|
|
"step": 2111
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.62383781412201e-06,
|
|
"loss": 1.998,
|
|
"step": 2112
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.5483998578076373e-06,
|
|
"loss": 1.957,
|
|
"step": 2113
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.4740594652418736e-06,
|
|
"loss": 1.8984,
|
|
"step": 2114
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.4008168004472917e-06,
|
|
"loss": 2.002,
|
|
"step": 2115
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.3286720250246253e-06,
|
|
"loss": 1.9707,
|
|
"step": 2116
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.2576252981520994e-06,
|
|
"loss": 1.957,
|
|
"step": 2117
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.1876767765853233e-06,
|
|
"loss": 1.9902,
|
|
"step": 2118
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.118826614656788e-06,
|
|
"loss": 1.9766,
|
|
"step": 2119
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 2.051074964275701e-06,
|
|
"loss": 2.0625,
|
|
"step": 2120
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 1.984421974927375e-06,
|
|
"loss": 1.9844,
|
|
"step": 2121
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 1.9188677936731734e-06,
|
|
"loss": 1.9902,
|
|
"step": 2122
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 1.8544125651501208e-06,
|
|
"loss": 2.0508,
|
|
"step": 2123
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 1.7910564315704035e-06,
|
|
"loss": 2.0078,
|
|
"step": 2124
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 1.7287995327214257e-06,
|
|
"loss": 2.0039,
|
|
"step": 2125
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 1.6676420059649754e-06,
|
|
"loss": 2.041,
|
|
"step": 2126
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.6075839862374485e-06,
|
|
"loss": 2.0098,
|
|
"step": 2127
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.5486256060492366e-06,
|
|
"loss": 2.0371,
|
|
"step": 2128
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.4907669954844495e-06,
|
|
"loss": 2.0293,
|
|
"step": 2129
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.434008282200805e-06,
|
|
"loss": 1.957,
|
|
"step": 2130
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.3783495914291844e-06,
|
|
"loss": 2.0156,
|
|
"step": 2131
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.3237910459734104e-06,
|
|
"loss": 2.0977,
|
|
"step": 2132
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.270332766210025e-06,
|
|
"loss": 2.0195,
|
|
"step": 2133
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.2179748700879012e-06,
|
|
"loss": 2.0762,
|
|
"step": 2134
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.1667174731280205e-06,
|
|
"loss": 2.0566,
|
|
"step": 2135
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.1165606884234182e-06,
|
|
"loss": 2.0586,
|
|
"step": 2136
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.0675046266386268e-06,
|
|
"loss": 2.0742,
|
|
"step": 2137
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 1.019549396009567e-06,
|
|
"loss": 1.9824,
|
|
"step": 2138
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 9.726951023434348e-07,
|
|
"loss": 2.0117,
|
|
"step": 2139
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 9.269418490182591e-07,
|
|
"loss": 1.9766,
|
|
"step": 2140
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 8.822897369827332e-07,
|
|
"loss": 2.0781,
|
|
"step": 2141
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 8.387388647561611e-07,
|
|
"loss": 2.0059,
|
|
"step": 2142
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 7.962893284279016e-07,
|
|
"loss": 1.9863,
|
|
"step": 2143
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 7.549412216574791e-07,
|
|
"loss": 2.0723,
|
|
"step": 2144
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 7.146946356743067e-07,
|
|
"loss": 2.0039,
|
|
"step": 2145
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 6.755496592773524e-07,
|
|
"loss": 2.0664,
|
|
"step": 2146
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 6.375063788349733e-07,
|
|
"loss": 2.0469,
|
|
"step": 2147
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 6.005648782848594e-07,
|
|
"loss": 2.1445,
|
|
"step": 2148
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 5.647252391337565e-07,
|
|
"loss": 2.0098,
|
|
"step": 2149
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 5.299875404572441e-07,
|
|
"loss": 1.9766,
|
|
"step": 2150
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 4.963518588996796e-07,
|
|
"loss": 1.9668,
|
|
"step": 2151
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 4.638182686738657e-07,
|
|
"loss": 2.0195,
|
|
"step": 2152
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 4.3238684156110543e-07,
|
|
"loss": 2.002,
|
|
"step": 2153
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 4.020576469108139e-07,
|
|
"loss": 1.9883,
|
|
"step": 2154
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 3.7283075164046274e-07,
|
|
"loss": 2.0059,
|
|
"step": 2155
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 3.4470622023557995e-07,
|
|
"loss": 2.0664,
|
|
"step": 2156
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 3.176841147492504e-07,
|
|
"loss": 1.8789,
|
|
"step": 2157
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 2.9176449480244895e-07,
|
|
"loss": 1.9727,
|
|
"step": 2158
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 2.6694741758342967e-07,
|
|
"loss": 2.0508,
|
|
"step": 2159
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 2.432329378478926e-07,
|
|
"loss": 2.0332,
|
|
"step": 2160
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 2.2062110791892798e-07,
|
|
"loss": 2.1211,
|
|
"step": 2161
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 1.9911197768662792e-07,
|
|
"loss": 2.0195,
|
|
"step": 2162
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 1.7870559460814173e-07,
|
|
"loss": 1.9922,
|
|
"step": 2163
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 1.5940200370750947e-07,
|
|
"loss": 2.0645,
|
|
"step": 2164
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 1.4120124757577291e-07,
|
|
"loss": 2.1094,
|
|
"step": 2165
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 1.2410336637047603e-07,
|
|
"loss": 1.9922,
|
|
"step": 2166
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 1.081083978159425e-07,
|
|
"loss": 2.0859,
|
|
"step": 2167
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 9.321637720310915e-08,
|
|
"loss": 2.0371,
|
|
"step": 2168
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 7.942733738924845e-08,
|
|
"loss": 1.9688,
|
|
"step": 2169
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 6.6741308798135e-08,
|
|
"loss": 1.918,
|
|
"step": 2170
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 5.5158319419934546e-08,
|
|
"loss": 2.0449,
|
|
"step": 2171
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 4.4678394810981906e-08,
|
|
"loss": 1.9531,
|
|
"step": 2172
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 3.5301558093947527e-08,
|
|
"loss": 1.9609,
|
|
"step": 2173
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 2.7027829957559925e-08,
|
|
"loss": 1.9941,
|
|
"step": 2174
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 1.985722865682771e-08,
|
|
"loss": 2.1387,
|
|
"step": 2175
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 1.3789770012762048e-08,
|
|
"loss": 2.0039,
|
|
"step": 2176
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 8.825467412376665e-09,
|
|
"loss": 2.1055,
|
|
"step": 2177
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 4.96433180879885e-09,
|
|
"loss": 1.9434,
|
|
"step": 2178
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 2.206371721158451e-09,
|
|
"loss": 1.9727,
|
|
"step": 2179
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 5.515932345323549e-10,
|
|
"loss": 2.123,
|
|
"step": 2180
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 0.0,
|
|
"loss": 1.9941,
|
|
"step": 2181
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"step": 2181,
|
|
"total_flos": 7.961046901766226e+17,
|
|
"train_loss": 2.16395056023613,
|
|
"train_runtime": 23682.7664,
|
|
"train_samples_per_second": 23.567,
|
|
"train_steps_per_second": 0.092
|
|
}
|
|
],
|
|
"max_steps": 2181,
|
|
"num_train_epochs": 1,
|
|
"total_flos": 7.961046901766226e+17,
|
|
"trial_name": null,
|
|
"trial_params": null
|
|
}
|