
define default model path once, sync path with readme (#1366)

András Salamon 2 years ago
parent
commit
9560655409
5 changed files with 1 addition and 5 deletions
  1. examples/common.h (+1, -1)
  2. examples/embedding/embedding.cpp (+0, -1)
  3. examples/main/main.cpp (+0, -1)
  4. examples/perplexity/perplexity.cpp (+0, -1)
  5. examples/save-load-state/save-load-state.cpp (+0, -1)

+ 1 - 1
examples/common.h

@@ -45,7 +45,7 @@ struct gpt_params {
     float   mirostat_tau      = 5.00f; // target entropy
     float   mirostat_eta      = 0.10f; // learning rate
 
-    std::string model  = "models/lamma-7B/ggml-model.bin"; // model path
+    std::string model  = "models/7B/ggml-model.bin"; // model path
     std::string prompt = "";
     std::string path_prompt_cache = "";  // path to file for saving/loading prompt eval state
     std::string input_prefix      = "";  // string to prefix user inputs with

+ 0 - 1
examples/embedding/embedding.cpp

@@ -6,7 +6,6 @@
 
 int main(int argc, char ** argv) {
     gpt_params params;
-    params.model = "models/llama-7B/ggml-model.bin";
 
     if (gpt_params_parse(argc, argv, params) == false) {
         return 1;

+ 0 - 1
examples/main/main.cpp

@@ -50,7 +50,6 @@ void sigint_handler(int signo) {
 
 int main(int argc, char ** argv) {
     gpt_params params;
-    params.model = "models/llama-7B/ggml-model.bin";
 
     if (gpt_params_parse(argc, argv, params) == false) {
         return 1;

+ 0 - 1
examples/perplexity/perplexity.cpp

@@ -116,7 +116,6 @@ void perplexity(llama_context * ctx, const gpt_params & params) {
 
 int main(int argc, char ** argv) {
     gpt_params params;
-    params.model = "models/llama-7B/ggml-model.bin";
 
     params.n_batch = 512;
     if (gpt_params_parse(argc, argv, params) == false) {

+ 0 - 1
examples/save-load-state/save-load-state.cpp

@@ -8,7 +8,6 @@
 
 int main(int argc, char ** argv) {
     gpt_params params;
-    params.model = "models/llama-7B/ggml-model.bin";
     params.seed = 42;
     params.n_threads = 4;
     params.repeat_last_n = 64;
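
All five hunks follow one pattern: the default model path is now written once, in the gpt_params field initializer in examples/common.h, and each example stops re-assigning params.model in main(). Below is a minimal, self-contained sketch of that pattern; gpt_params here is a trimmed stand-in for the real struct, not the full definition.

    // Sketch only: the default path lives in the struct initializer, so every
    // example that declares `gpt_params params;` picks it up automatically.
    #include <cstdio>
    #include <string>

    struct gpt_params {
        std::string model  = "models/7B/ggml-model.bin"; // single source of the default
        std::string prompt = "";
    };

    int main() {
        gpt_params params; // no per-example override needed anymore
        // In the real examples, gpt_params_parse() can still replace this via -m/--model.
        std::printf("default model path: %s\n", params.model.c_str());
        return 0;
    }

Keeping the default in one place is also what lets the commit sync the path with the README without touching each example separately.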