This repository has been archived on 2023-11-28. You can view files and clone it, but cannot push or open issues/pull-requests.
2023-02-19 20:57:32 -08:00
|
|
|
import torch
|
|
|
|
from pathlib import Path
|
|
|
|
|
|
|
|
import celeste_ai.plotting as plotting
|
|
|
|
from multiprocessing import Pool
|
|
|
|
|
|
|
|
|
|
|
|
# Root directory holding the current training run: archived model
# checkpoints live under m/"model_archive", plots are written under
# m/"plots/...". Referenced by plot_pred, plot_best, and the driver loop.
m = Path("model_data/current")
def plot_pred(src_model):
    """Render the predicted-reward plot for one archived model checkpoint.

    The plot is written to m/"plots/predicted/<checkpoint stem>.png".
    Runs on CPU so it is safe inside multiprocessing workers.
    """
    out_path = m / f"plots/predicted/{src_model.stem}.png"
    plotting.predicted_reward(
        src_model,
        out_path,
        device=torch.device("cpu"),
    )
def plot_best(src_model):
    """Render the best-action plot for one archived model checkpoint.

    The plot is written to m/"plots/best_action/<checkpoint stem>.png".
    Runs on CPU so it is safe inside multiprocessing workers.
    """
    out_path = m / f"plots/best_action/{src_model.stem}.png"
    plotting.best_action(
        src_model,
        out_path,
        device=torch.device("cpu"),
    )
# Plot kinds to render, mapped to the worker function that draws one
# checkpoint's plot. Keys are used only in the progress message;
# commented-out entries are temporarily disabled kinds.
_PLOTTERS = {
    #"prediction": plot_pred,
    "best_action": plot_best,
}

if __name__ == "__main__":
    # The __main__ guard is required for correctness, not just style:
    # multiprocessing's "spawn" start method (the default on Windows and
    # macOS) re-imports this module inside every worker process, and
    # without the guard each worker would re-run this loop and try to
    # spawn its own Pool.
    for kind, render in _PLOTTERS.items():
        print(f"Making {kind} plots...")
        # Fan out over every archived checkpoint with 5 worker processes.
        with Pool(5) as p:
            p.map(
                render,
                list((m / "model_archive").iterdir()),
            )