- Notifications
You must be signed in to change notification settings — Fork 352
LLM fine-tuning#1350
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Uh oh!
There was an error while loading. Please reload this page.
LLM fine-tuning#1350
Changes from all commits
e3bea27c4cf332fb7cc2a558448782cb4f7c10de4763ee09b865ae28097a8cf2dd50e66ac87221e40cd8312d893afc2e9322ee5c797d455db70094465d2f8b5f1b5f408084bfdc0c6ee421af8f4bbca963db857c7cbee439284cf166c65c85759ee3b53916831215b8dae6b74dae5ffc435f5bdaeb2683e5221cc6db147eFile filter
Filter by extension
Conversations
Uh oh!
There was an error while loading. Please reload this page.
Jump to
Uh oh!
There was an error while loading. Please reload this page.
Diff view
Diff view
There are no files selected for viewing
Large diffs are not rendered by default.
Uh oh!
There was an error while loading. Please reload this page.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -14,3 +14,5 @@ | ||
| .DS_Store | ||
| # venv | ||
| pgml-venv | ||
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Uh oh!
There was an error while loading. Please reload this page.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,12 @@ | ||
| -- Add conversation, text-pair-classification task type | ||
| ALTER TYPE pgml.task ADD VALUE IF NOT EXISTS 'conversation'; | ||
| ALTER TYPE pgml.task ADD VALUE IF NOT EXISTS 'text-pair-classification'; | ||
| -- Create pgml.logs table | ||
| CREATE TABLE IF NOT EXISTS pgml.logs ( | ||
| id SERIAL PRIMARY KEY, | ||
| model_id BIGINT, | ||
| project_id BIGINT, | ||
| created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, | ||
| logs JSONB | ||
| ); |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -803,7 +803,7 @@ fn tune( | ||
| project_name: &str, | ||
| task: default!(Option<&str>, "NULL"), | ||
| relation_name: default!(Option<&str>, "NULL"), | ||
| _y_column_name: default!(Option<&str>, "NULL"), | ||
Contributor There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others.Learn more. Why the underscore? Is it because it's not used? ContributorAuthor There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others.Learn more. That's correct. | ||
| model_name: default!(Option<&str>, "NULL"), | ||
| hyperparams: default!(JsonB, "'{}'"), | ||
| test_size: default!(f32, 0.25), | ||
| @@ -861,9 +861,7 @@ fn tune( | ||
| let snapshot = Snapshot::create( | ||
| relation_name, | ||
| None, | ||
| test_size, | ||
| test_sampling, | ||
| materialize_snapshot, | ||
| @@ -885,13 +883,14 @@ fn tune( | ||
| // algorithm will be transformers, stash the model_name in a hyperparam for v1 compatibility. | ||
| let mut hyperparams = hyperparams.0.as_object().unwrap().clone(); | ||
| hyperparams.insert(String::from("model_name"), json!(model_name)); | ||
| hyperparams.insert(String::from("project_name"), json!(project_name)); | ||
| let hyperparams = JsonB(json!(hyperparams)); | ||
| // # Default repeatable random state when possible | ||
| // let algorithm = Model.algorithm_from_name_and_task(algorithm, task); | ||
| // if "random_state" in algorithm().get_params() and "random_state" not in hyperparams: | ||
| // hyperparams["random_state"] = 0 | ||
| let model = Model::finetune(&project, &mut snapshot, &hyperparams); | ||
| let new_metrics: &serde_json::Value = &model.metrics.unwrap().0; | ||
| let new_metrics = new_metrics.as_object().unwrap(); | ||
| @@ -915,18 +914,19 @@ fn tune( | ||
| Some(true) | None => { | ||
| if let Ok(Some(deployed_metrics)) = deployed_metrics { | ||
| let deployed_metrics = deployed_metrics.0.as_object().unwrap(); | ||
| let deployed_value = deployed_metrics | ||
| .get(&project.task.default_target_metric()) | ||
| .and_then(|value| value.as_f64()) | ||
| .unwrap_or_default(); // Default to 0.0 if the key is not present or conversion fails | ||
| // Get the value for the default target metric from new_metrics or provide a default value | ||
| let new_value = new_metrics | ||
| .get(&project.task.default_target_metric()) | ||
| .and_then(|value| value.as_f64()) | ||
| .unwrap_or_default(); // Default to 0.0 if the key is not present or conversion fails | ||
| if project.task.value_is_better(deployed_value, new_value) { | ||
| deploy = false; | ||
| } | ||
| } | ||
Uh oh!
There was an error while loading. Please reload this page.