pymarl3-feudal/results/sacred/10gen_protoss/feudal/3/run.json
2025-01-08 18:39:17 +08:00

116 lines
No EOL
2.8 KiB
JSON

{
  "artifacts": [],
  "command": "my_main",
  "experiment": {
    "base_dir": "C:\\Users\\Taiyo\\Desktop\\SMAC V2\\pymarl3\\src",
    "dependencies": [
      "numpy==1.23.1",
      "PyYAML==6.0.2",
      "sacred==0.8.7",
      "torch==1.13.1+cu117"
    ],
    "mainfile": "main.py",
    "name": "pymarl",
    "repositories": [
      {
        "commit": "44bb188185fd0292a1a306c86396027efb44224d",
        "dirty": true,
        "url": "https://github.com/tjuHaoXiaotian/pymarl3.git"
      },
      {
        "commit": "44bb188185fd0292a1a306c86396027efb44224d",
        "dirty": true,
        "url": "https://github.com/tjuHaoXiaotian/pymarl3.git"
      }
    ],
    "sources": [
      [
        "main.py",
        "_sources\\main_654daaa6534bcee62784d639ea63e51d.py"
      ],
      [
        "utils\\logging.py",
        "_sources\\logging_f71df6d788e929fac28afdf951d63d54.py"
      ]
    ]
  },
  "heartbeat": "2025-01-05T19:53:52.751678",
  "host": {
    "ENV": {},
    "cpu": "AMD Ryzen 7 5700X3D 8-Core Processor",
    "gpus": {
      "driver_version": "560.94",
      "gpus": [
        {
          "model": "NVIDIA GeForce RTX 4080 SUPER",
          "persistence_mode": false,
          "total_memory": 16376
        }
      ]
    },
    "hostname": "Taiyopen",
    "os": [
      "Windows",
      "Windows-10-10.0.22631-SP0"
    ],
    "python_version": "3.10.16"
  },
  "meta": {
    "command": "my_main",
    "config_updates": {
      "batch_size": 128,
      "batch_size_run": 4,
      "buffer_size": 5000,
      "epsilon_anneal_time": 100000,
      "obs_agent_id": true,
      "obs_last_action": false,
      "runner": "parallel",
      "t_max": 4050000,
      "td_lambda": 0.6,
      "use_tensorboard": false
    },
    "named_configs": [],
    "options": {
      "--beat-interval": null,
      "--capture": null,
      "--comment": null,
      "--debug": false,
      "--enforce_clean": false,
      "--file_storage": null,
      "--force": false,
      "--help": false,
      "--id": null,
      "--loglevel": null,
      "--mongo_db": null,
      "--name": null,
      "--pdb": false,
      "--print-config": false,
      "--priority": null,
      "--queue": false,
      "--s3": null,
      "--sql": null,
      "--tiny_db": null,
      "--unobserved": false,
      "COMMAND": null,
      "UPDATE": [
        "obs_agent_id=True",
        "obs_last_action=False",
        "runner=parallel",
        "batch_size_run=4",
        "buffer_size=5000",
        "t_max=4050000",
        "epsilon_anneal_time=100000",
        "batch_size=128",
        "td_lambda=0.6",
        "use_tensorboard=False"
      ],
      "help": false,
      "with": true
    }
  },
  "resources": [],
  "result": null,
  "start_time": "2025-01-05T19:51:28.620969",
  "status": "INTERRUPTED",
  "stop_time": "2025-01-05T19:53:52.757749"
}