Spaces:
Running
on
Zero
Running
on
Zero
File size: 1,941 Bytes
12edc27 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Configuration management for IC-Custom application.
"""
import os
import argparse
from omegaconf import OmegaConf
def parse_args(argv=None):
    """Parse command line arguments for the IC-Custom app.

    Args:
        argv: Optional list of argument strings to parse. Defaults to
            ``None``, in which case argparse falls back to ``sys.argv[1:]``
            (previous behavior is unchanged).

    Returns:
        argparse.Namespace: parsed options (config, hf_token, hf_cache_dir,
        assets_cache_dir, save_results, enable_ben2_for_mask_ref,
        enable_vlm_for_prompt).
    """
    parser = argparse.ArgumentParser(description="IC-Custom App.")
    parser.add_argument(
        "--config",
        type=str,
        default="configs/app/app.yaml",
        help="path to config",
    )
    parser.add_argument(
        "--hf_token",
        type=str,
        required=False,
        help="Hugging Face token",
    )
    parser.add_argument(
        "--hf_cache_dir",
        type=str,
        required=False,
        default=os.path.expanduser("~/.cache/huggingface/hub"),
        help="Cache directory to save the models, default is ~/.cache/huggingface/hub",
    )
    parser.add_argument(
        "--assets_cache_dir",
        type=str,
        required=False,
        default="results/app",
        help="Cache directory to save the results, default is results/app",
    )
    parser.add_argument(
        "--save_results",
        action="store_true",
        help="Save results",
    )
    # BooleanOptionalAction also generates the --no-<flag> negative form.
    parser.add_argument(
        "--enable_ben2_for_mask_ref",
        action=argparse.BooleanOptionalAction,
        default=True,
        help="Enable ben2 for mask reference (default: True)",
    )
    parser.add_argument(
        "--enable_vlm_for_prompt",
        action=argparse.BooleanOptionalAction,
        default=False,
        # Fixed: help text previously claimed "default: True" while the
        # actual default is False.
        help="Enable vlm for prompt (default: False)",
    )
    return parser.parse_args(argv)
def load_config(config_path):
    """Read the YAML configuration at *config_path* via OmegaConf.

    Args:
        config_path: Path to the configuration file.

    Returns:
        The OmegaConf configuration object loaded from the file.
    """
    cfg = OmegaConf.load(config_path)
    return cfg
def setup_environment(args):
    """Export Hugging Face environment variables from parsed arguments.

    Sets ``HF_TOKEN`` from ``args.hf_token`` and ``HF_HUB_CACHE`` from
    ``args.hf_cache_dir``; any attribute that is ``None`` is skipped so
    existing environment values are left untouched.
    """
    env_mapping = (
        ("HF_TOKEN", args.hf_token),
        ("HF_HUB_CACHE", args.hf_cache_dir),
    )
    for var_name, value in env_mapping:
        if value is not None:
            os.environ[var_name] = value
|