{
"Huggy": {
"checkpoints": [
{
"steps": 199937,
"file_path": "results/Doggy9/Huggy/Huggy-199937.onnx",
"reward": 3.3582554483735882,
"creation_time": 1694269475.2351584,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-199937.pt"
]
},
{
"steps": 399345,
"file_path": "results/Doggy9/Huggy/Huggy-399345.onnx",
"reward": 3.943419728480594,
"creation_time": 1694269898.4907684,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-399345.pt"
]
},
{
"steps": 599970,
"file_path": "results/Doggy9/Huggy/Huggy-599970.onnx",
"reward": 3.633149156765062,
"creation_time": 1694270329.452095,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-599970.pt"
]
},
{
"steps": 799906,
"file_path": "results/Doggy9/Huggy/Huggy-799906.onnx",
"reward": 3.9329614894347147,
"creation_time": 1694270731.5866516,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-799906.pt"
]
},
{
"steps": 999963,
"file_path": "results/Doggy9/Huggy/Huggy-999963.onnx",
"reward": 3.941510277505247,
"creation_time": 1694271153.693182,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-999963.pt"
]
},
{
"steps": 1199958,
"file_path": "results/Doggy9/Huggy/Huggy-1199958.onnx",
"reward": 3.686204390039722,
"creation_time": 1694271582.6200478,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-1199958.pt"
]
},
{
"steps": 1399891,
"file_path": "results/Doggy9/Huggy/Huggy-1399891.onnx",
"reward": 3.669710972729851,
"creation_time": 1694272016.0524597,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-1399891.pt"
]
},
{
"steps": 1599959,
"file_path": "results/Doggy9/Huggy/Huggy-1599959.onnx",
"reward": 3.912418329764989,
"creation_time": 1694272422.9833078,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-1599959.pt"
]
},
{
"steps": 1799952,
"file_path": "results/Doggy9/Huggy/Huggy-1799952.onnx",
"reward": 3.605493782247816,
"creation_time": 1694272852.6953127,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-1799952.pt"
]
},
{
"steps": 1999962,
"file_path": "results/Doggy9/Huggy/Huggy-1999962.onnx",
"reward": 4.037164392009858,
"creation_time": 1694273277.5905137,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-1999962.pt"
]
},
{
"steps": 2000027,
"file_path": "results/Doggy9/Huggy/Huggy-2000027.onnx",
"reward": 4.020351781490001,
"creation_time": 1694273277.6858068,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-2000027.pt"
]
}
],
"final_checkpoint": {
"steps": 2000027,
"file_path": "results/Doggy9/Huggy.onnx",
"reward": 4.020351781490001,
"creation_time": 1694273277.6858068,
"auxillary_file_paths": [
"results/Doggy9/Huggy/Huggy-2000027.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}