mb706 / autoxgboost3 Goto Github PK
View Code? Open in Web Editor NEW
autoxgboost emulation for mlr3
License: Other
autoxgboost emulation for mlr3
License: Other
use task from mattermost file
# Build the autoxgboost tuning setup from the task; tune.threshold disabled
# (autoxgboost_space is provided by the autoxgboost/mlr3 emulation package).
axgb_settings = autoxgboost_space(task, tune.threshold = FALSE)
# Unpack the pieces needed for a mlr3tuning run.
rsmp_inner = axgb_settings$resampling
learner = axgb_settings$learner
ps = axgb_settings$searchspace
# i added this and then it suddenly worked
# NOTE(review): presumably the search space is missing the booster parameter
# that the learner's param_set declares — verify against autoxgboost_space.
ps$add(learner$param_set$params$xgboost.booster)
###########
# Single-evaluation random search, just enough to trigger the reported error.
ti = TuningInstance$new(task = task, learner = learner, resampling = rsmp_inner, param_set = ps, measures = msr("classif.ce"), terminator = term("evals", n_evals = 1))
tuner = tnr("random_search")
# here would be the error
tuner$tune(ti)
wparam = ParamDbl$new("xgboosts.cale_pos_weight", -10, 10)
The parameter id contains a typo ("xgboosts.cale_pos_weight"); it must be:
wparam = ParamDbl$new("xgboost.scale_pos_weight", -10, 10)
Code to reproduce the error (the first part is for downloading the data):
# Download an OpenML task in an isolated callr session (encapsulate keeps the
# old-mlr / OpenML packages out of the current R session) and return the raw
# pieces needed to rebuild the task locally.
ll = mlr3misc::encapsulate("callr", function(task.id) {
# These are loaded inside the child process only.
library(mlr)
library(OpenML)
# Use RWeka to parse the ARFF download.
setOMLConfig(arff.reader = "RWeka")
# Fetch the OpenML task and convert it to an old-mlr task object.
OMLtask = convertOMLTaskToMlr(getOMLTask(task.id))
data = getTaskData(OMLtask$mlr.task)
# Resampling instance and its description, as defined by OpenML.
rin = OMLtask$mlr.rin
rdesc = OMLtask$mlr.rin$desc
# Task metadata: id, type ("classif"/"regr"), and target column name.
task.id = OMLtask$mlr.task$task.desc$id
task.type = OMLtask$mlr.task$type
target = OMLtask$mlr.task$task.desc$target
# Only plain-R objects cross the process boundary back to the caller.
return(list(task.id = task.id, task.type = task.type, data = data, rin = rin, rdesc = rdesc, target = target))
}
, .args = list(task.id = 167213))
# Unwrap the result produced in the callr child process.
res = ll$result
# Normalize row names to integers (they come back as character strings).
row.names(res$data) = as.integer(row.names(res$data))
# Rebuild an mlr3 task of the matching type from the downloaded data.
if (res$task.type == "classif") {
task = mlr3::TaskClassif$new(id = res$task.id, backend = res$data, target = res$target)
} else {
task = mlr3::TaskRegr$new(id = res$task.id, backend = res$data, target = res$target)
}
library(autoxgboost)
library(mlr3tuning)
library(checkmate)
# Load a previously saved gisette task from disk.
data="data/gisette"
task = readRDS(file.path(data, "task.rds"))
# Build the autoxgboost search space / learner / inner resampling.
axgb_settings = autoxgboost_space(task, tune.threshold = FALSE)
rsmp_inner = axgb_settings$resampling
learner = axgb_settings$learner
ps = axgb_settings$searchspace
# Both workarounds commented out to reproduce the original error.
#ps$add(learner$param_set$params$xgboost.booster)
#ps$add(ParamFct$new(id = "xgboost.booster", levels = "gblinear", default = "gblinear"))
# One random-search evaluation is enough to hit the assertion failure.
ti = TuningInstance$new(task = task, learner = learner, resampling = rsmp_inner, param_set = ps, measures = msr("classif.ce"), terminator = term("evals", n_evals = 1))
tuner = tnr("random_search")
tuner$tune(ti)
throwing the error:
> tuner$tune(ti)
INFO [12:11:27.547] Starting to tune 9 parameters with '<TunerRandomSearch>' and '<TerminatorEvals>'
INFO [12:11:27.580] Terminator settings: n_evals=1
INFO [12:11:27.632] Evaluating 1 configurations
INFO [12:11:27.639] xgboost.eta xgboost.gamma xgboost.max_depth
INFO [12:11:27.639] 0.1015153 -6.913219 14
INFO [12:11:27.639] xgboost.colsample_bytree xgboost.colsample_bylevel
INFO [12:11:27.639] 0.6309831 0.6852687
INFO [12:11:27.639] xgboost.lambda xgboost.alpha xgboost.subsample
INFO [12:11:27.639] -7.631537 4.986284 0.7418983
INFO [12:11:27.639] xgboosts.cale_pos_weight
INFO [12:11:27.639] -4.246136
Error in (function (xs) :
Assertion on 'xs' failed: Parameter 'xgboosts.cale_pos_weight' not available..
When a trafo is added to the parameter set, the untransformed values are apparently checked against the parameter set's bounds, and these unwanted assertions fail with an assert error.
This makes log sampling currently not possible for random search.
Steps to reproduce:
library(autoxgboost)
library(mlr3tuning)
library(checkmate)
# Load a previously saved gisette task from disk.
data="data/gisette"
task = readRDS(file.path(data, "task.rds"))
# Build the autoxgboost search space / learner / inner resampling.
axgb_settings = autoxgboost_space(task, tune.threshold = FALSE)
rsmp_inner = axgb_settings$resampling
learner = axgb_settings$learner
ps = axgb_settings$searchspace
# Add nrounds on a base-3 log scale: lower = as.integer(log(10, 3)) = 2,
# upper = as.integer(log(2430, 3)) = 7; tagged "budget" for multifidelity use.
ps$add(
ParamInt$new(
id = "xgboost.nrounds",
lower = as.integer(log(10, 3)),
upper = as.integer(log(2430, 3)),
tags = "budget"
)
)
# Trafo maps the log-scale value back to the real nrounds (3^x).
# NOTE(review): this trafo is what triggers the reported assert error.
ps$trafo = function(x, param_set) {
x$xgboost.nrounds = 3^x$xgboost.nrounds
return(x)
}
# One random-search evaluation reproduces the failure.
ti = TuningInstance$new(task = task, learner = learner, resampling = rsmp_inner, param_set = ps, measures = msr("classif.ce"), terminator = term("evals", n_evals = 1))
tuner = tnr("random_search")
tuner$tune(ti)
While removing the trafo gives us no assert error:
library(autoxgboost)
library(mlr3tuning)
library(checkmate)
# Load a previously saved gisette task from disk.
data="data/gisette"
task = readRDS(file.path(data, "task.rds"))
# Build the autoxgboost search space / learner / inner resampling.
axgb_settings = autoxgboost_space(task, tune.threshold = FALSE)
rsmp_inner = axgb_settings$resampling
learner = axgb_settings$learner
ps = axgb_settings$searchspace
# Same nrounds parameter as the failing script, but WITHOUT the trafo —
# this version runs without the assert error.
ps$add(
ParamInt$new(
id = "xgboost.nrounds",
lower = as.integer(log(10, 3)),
upper = as.integer(log(2430, 3)),
tags = "budget"
)
)
ti = TuningInstance$new(task = task, learner = learner, resampling = rsmp_inner, param_set = ps, measures = msr("classif.ce"), terminator = term("evals", n_evals = 1))
tuner = tnr("random_search")
tuner$tune(ti)
A declarative, efficient, and flexible JavaScript library for building user interfaces.
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
An Open Source Machine Learning Framework for Everyone
The Web framework for perfectionists with deadlines.
A PHP framework for web artisans
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
Some thing interesting about web. New door for the world.
A server is a program made to process requests and deliver data to clients.
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
Some thing interesting about visualization, use data art
Some thing interesting about game, make everyone happy.
We are working to build community through open source technology. NB: members must have two-factor auth.
Open source projects and samples from Microsoft.
Google ❤️ Open Source for everyone.
Alibaba Open Source for everyone
Data-Driven Documents codes.
China tencent open source team.