Spaces:
Running
on
Zero
Running
on
Zero
File size: 1,844 Bytes
465d7e4 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 |
import os
import sys
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.append(PROJECT_ROOT)
import yaml
import argparse
from lightning import Trainer
from lightning.pytorch.loggers import WandbLogger
from trainer import XrayReg
import logging
import wandb
from lightning.pytorch.callbacks import LearningRateMonitor
def parse_args(argv=None):
    """Parse command-line arguments for training.

    Args:
        argv: Optional list of argument strings. Defaults to ``None``,
            in which case argparse falls back to ``sys.argv[1:]`` —
            identical to the previous behavior. Passing an explicit list
            makes this function testable without patching ``sys.argv``.

    Returns:
        argparse.Namespace with a single ``config`` attribute: the path
        to the YAML config file (default ``configs/config.yaml``).
    """
    parser = argparse.ArgumentParser(description="Train Xray Model")
    parser.add_argument(
        "--config",
        type=str,
        default="configs/config.yaml",
        help="Path to the config file",
    )
    return parser.parse_args(argv)
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)
    try:
        args = parse_args()
        # Load the YAML training configuration; keys used below:
        # wandb_project, training.max_epochs, logging.log_every_n_steps.
        with open(args.config, "r") as ymlfile:
            config = yaml.safe_load(ymlfile)

        # Single source of truth for the project name (was duplicated
        # between wandb.init and WandbLogger, risking drift).
        project = config.get("wandb_project", "xray_regression_noaug")
        wandb.init(project=project)
        wandb_logger = WandbLogger(project=project)

        lr_monitor = LearningRateMonitor(logging_interval="step")
        trainer = Trainer(
            max_epochs=config["training"]["max_epochs"],
            log_every_n_steps=config["logging"]["log_every_n_steps"],
            logger=wandb_logger,
            callbacks=[lr_monitor],
        )
        model = XrayReg(config)

        logger.info("Starting training...")
        trainer.fit(model)
        logger.info("Training completed. Starting testing...")
        trainer.test(model)
        logger.info("Testing completed. Logging test results...")
        model.save_test_results_to_wandb()
        logger.info("Test results saved to Wandb")
    except Exception:
        # logger.exception records the full traceback; the previous
        # logger.error(f"...{e}") discarded it, hiding the failure site.
        logger.exception("An error occurred")
        sys.exit(1)
    finally:
        # Close the wandb run even on failure (previously skipped when an
        # exception fired, leaving the run dangling). wandb.finish() is a
        # no-op when no run was ever started.
        wandb.finish()
|