Commit 89c84aef by nzy

step1: merge mk_prompt and sample

parent 4fdf6d7d
...@@ -64,4 +64,12 @@ def mk_sample_prompt(model_path, apps_path, output_path): ...@@ -64,4 +64,12 @@ def mk_sample_prompt(model_path, apps_path, output_path):
if __name__ == "__main__": if __name__ == "__main__":
cfg = read_config() cfg = read_config()
mk_sample_prompt(cfg["model"], cfg["apps"], cfg["sample"]["sample_prompt_path"]) mk_sample_prompt(cfg["model"], cfg["apps"], cfg["sample"]["sample_prompt_path"])
\ No newline at end of file
from utils_vllm import vllm_chatcomplete
vllm_chatcomplete(
cfg["model"],
cfg["sample"]["sample_prompt_path"],
cfg["sample"]["sample_result_path"],
cfg["sample"]["sampling_params"],
)
\ No newline at end of file
from utils_vllm import vllm_chatcomplete
from utils import read_config


def main():
    """Sample completions with vLLM using paths taken from the project config.

    Reads the config via ``read_config()`` and forwards the model path,
    prompt/result file locations, and sampling parameters to
    ``vllm_chatcomplete``.
    """
    config = read_config()
    # All sampling-related settings live under the "sample" section.
    sample_cfg = config["sample"]
    vllm_chatcomplete(
        config["model"],
        sample_cfg["sample_prompt_path"],
        sample_cfg["sample_result_path"],
        sample_cfg["sampling_params"],
    )


if __name__ == "__main__":
    main()
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment