
Note

Go to the end to download the full example code.

Benchmark of TreeEnsemble implementation

The following example compares the inference time between onnxruntime and sklearn.ensemble.RandomForestRegressor, for different numbers of estimators, max depth, and parallelization settings. It does so for a fixed number of rows and features.
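As a point of reference, the conversion and inference pattern being measured looks as follows. This is a minimal sketch, not part of the benchmarked script: train a regressor, convert it with to_onnx, then run it through an InferenceSession.

import numpy
from sklearn.ensemble import RandomForestRegressor
from skl2onnx import to_onnx
from onnxruntime import InferenceSession

# small toy data, only to illustrate the conversion + inference pattern
X = numpy.random.randn(100, 10).astype(numpy.float32)
y = X.mean(axis=1)
rf = RandomForestRegressor(n_estimators=10, max_depth=4).fit(X, y)

onx = to_onnx(rf, X[:1])  # ONNX graph with a TreeEnsembleRegressor node
sess = InferenceSession(onx.SerializeToString(), providers=["CPUExecutionProvider"])
skl_pred = rf.predict(X)
ort_pred = sess.run(None, {"X": X})[0]  # same model, executed by onnxruntime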

import and registration of necessary converters

import pickle
import os
import time
from itertools import product

import matplotlib.pyplot as plt
import numpy
import pandas
from lightgbm import LGBMRegressor
from onnxruntime import InferenceSession, SessionOptions
from psutil import cpu_count
from sphinx_runpython.runpython import run_cmd
from skl2onnx import to_onnx, update_registered_converter
from skl2onnx.common.shape_calculator import calculate_linear_regressor_output_shapes
from sklearn import set_config
from sklearn.ensemble import RandomForestRegressor
from tqdm import tqdm
from xgboost import XGBRegressor
from onnxmltools.convert.xgboost.operator_converters.XGBoost import convert_xgboost


def skl2onnx_convert_lightgbm(scope, operator, container):
    from onnxmltools.convert.lightgbm.operator_converters.LightGbm import (
        convert_lightgbm,
    )

    options = scope.get_options(operator.raw_operator)
    operator.split = options.get("split", None)
    convert_lightgbm(scope, operator, container)


update_registered_converter(
    LGBMRegressor,
    "LightGbmLGBMRegressor",
    calculate_linear_regressor_output_shapes,
    skl2onnx_convert_lightgbm,
    options={"split": None},
)
update_registered_converter(
    XGBRegressor,
    "XGBoostXGBRegressor",
    calculate_linear_regressor_output_shapes,
    convert_xgboost,
)

# The following instruction reduces the time spent by scikit-learn
# to validate the data.
set_config(assume_finite=True)
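Once these converters are registered, to_onnx can handle LGBMRegressor and XGBRegressor like any scikit-learn estimator. The short sketch below is not executed in this benchmark and only illustrates what the registration enables:

# Hedged sketch: relies on the update_registered_converter calls above.
X_small = numpy.random.randn(50, 10).astype(numpy.float32)
y_small = X_small.sum(axis=1)

xgb = XGBRegressor(n_estimators=5, max_depth=3).fit(X_small, y_small)
onx_xgb = to_onnx(xgb, X_small[:1])  # possible because "XGBoostXGBRegressor" was registered

lgb = LGBMRegressor(n_estimators=5, max_depth=3).fit(X_small, y_small)
onx_lgb = to_onnx(lgb, X_small[:1])  # same for "LightGbmLGBMRegressor"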

Machine details

print(f"Number of cores: {cpu_count()}")
Number of cores: 20

But this information is not usually enough. Let's extract the cache information.

try:
    out, err = run_cmd("lscpu")
    print(out)
except Exception as e:
    print(f"lscpu not available: {e}")
<Popen: returncode: None args: ['lscpu']>

Or with the following command.

out, err = run_cmd("cat /proc/cpuinfo")
print(out)
<Popen: returncode: None args: ['cat', '/proc/cpuinfo']>
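If run_cmd does not capture the command output (the rendered logs above only show the Popen objects), a plain subprocess call is an alternative. The sketch below is not part of the original script and only keeps the cache-related lines of lscpu:

import subprocess

try:
    cpuinfo = subprocess.run(
        ["lscpu"], capture_output=True, text=True, check=True
    ).stdout
    print("\n".join(line for line in cpuinfo.splitlines() if "cache" in line.lower()))
except (FileNotFoundError, subprocess.CalledProcessError) as e:
    print(f"lscpu not available: {e}")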

Function to measure inference time

def measure_inference(fct, X, repeat, max_time=5, quantile=1):
    """
    Run *repeat* times the same function on data *X*.

    :param fct: function to run
    :param X: data
    :param repeat: number of times to run
    :param max_time: maximum time to use to measure the inference
    :return: number of runs, sum of the time, average, median
    """
    times = []
    for _n in range(repeat):
        perf = time.perf_counter()
        fct(X)
        delta = time.perf_counter() - perf
        times.append(delta)
        if len(times) < 3:
            continue
        if max_time is not None and sum(times) >= max_time:
            break
    times.sort()
    quantile = 0 if (len(times) - quantile * 2) < 3 else quantile
    if quantile == 0:
        tt = times
    else:
        tt = times[quantile:-quantile]
    return (len(times), sum(times), sum(tt) / len(tt), times[len(times) // 2])
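A quick sanity check of this helper on a cheap numpy operation (an illustrative call, not part of the benchmark itself):

# measure_inference and numpy are defined/imported above.
X_demo = numpy.random.randn(1000, 10).astype(numpy.float32)
n_runs, total, avg, med = measure_inference(lambda x: x.sum(axis=1), X_demo, repeat=10)
print(f"n_runs={n_runs} total={total:.6f}s avg={avg:.6f}s median={med:.6f}s")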

Benchmark

The following script benchmarks the inference of the same model with scikit-learn (random forest) and with onnxruntime after the model was converted into ONNX, for the following configurations.

small = cpu_count() < 25
if small:
    N = 1000
    n_features = 10
    n_jobs = [1, cpu_count() // 2, cpu_count()]
    n_ests = [10, 20, 30]
    depth = [4, 6, 8, 10]
    Regressor = RandomForestRegressor
else:
    N = 100000
    n_features = 50
    n_jobs = [cpu_count(), cpu_count() // 2, 1]
    n_ests = [100, 200, 400]
    depth = [6, 8, 10, 12, 14]
    Regressor = RandomForestRegressor

legend = f"parallel-nf-{n_features}-"

# avoid duplicates on machine with 1 or 2 cores.
n_jobs = list(sorted(set(n_jobs), reverse=True))

Benchmark parameters

repeat = 7  # repeat n times the same inference
quantile = 1  # exclude extreme times
max_time = 5  # maximum number of seconds to spend on one configuration
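With repeat=7 and quantile=1, measure_inference drops the fastest and the slowest run before averaging, which makes the reported average robust to outliers. A tiny illustration with made-up timings:

# Hypothetical timings in seconds; 5.0 simulates a run disturbed by the OS.
times = sorted([0.9, 1.0, 1.1, 1.0, 5.0, 1.0, 1.2])
trimmed = times[1:-1]  # quantile=1: drop one value at each end
print(sum(trimmed) / len(trimmed))  # ~1.06, the outlier is excluded from the average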

Data

X = numpy.random.randn(N, n_features).astype(numpy.float32)
noise = (numpy.random.randn(X.shape[0]) / (n_features // 5)).astype(numpy.float32)
y = X.mean(axis=1) + noise
n_train = min(N, N // 3)

data = []
couples = list(product(n_jobs, depth, n_ests))
bar = tqdm(couples)
cache_dir = "_cache"
if not os.path.exists(cache_dir):
    os.mkdir(cache_dir)

for n_j, max_depth, n_estimators in bar:
    if n_j == 1 and n_estimators > n_ests[0]:
        # skipping
        continue

    # parallelization
    cache_name = os.path.join(
        cache_dir, f"nf-{X.shape[1]}-rf-J-{n_j}-E-{n_estimators}-D-{max_depth}.pkl"
    )
    if os.path.exists(cache_name):
        with open(cache_name, "rb") as f:
            rf = pickle.load(f)
    else:
        bar.set_description(f"J={n_j} E={n_estimators} D={max_depth} train rf")
        if n_j == 1 and issubclass(Regressor, RandomForestRegressor):
            rf = Regressor(max_depth=max_depth, n_estimators=n_estimators, n_jobs=-1)
            rf.fit(X[:n_train], y[:n_train])
            rf.n_jobs = 1
        else:
            rf = Regressor(max_depth=max_depth, n_estimators=n_estimators, n_jobs=n_j)
            rf.fit(X[:n_train], y[:n_train])
        with open(cache_name, "wb") as f:
            pickle.dump(rf, f)

    bar.set_description(f"J={n_j} E={n_estimators} D={max_depth} ISession")
    so = SessionOptions()
    so.intra_op_num_threads = n_j
    cache_name = os.path.join(
        cache_dir, f"nf-{X.shape[1]}-rf-J-{n_j}-E-{n_estimators}-D-{max_depth}.onnx"
    )
    if os.path.exists(cache_name):
        sess = InferenceSession(cache_name, so, providers=["CPUExecutionProvider"])
    else:
        bar.set_description(f"J={n_j} E={n_estimators} D={max_depth} cvt onnx")
        onx = to_onnx(rf, X[:1])
        with open(cache_name, "wb") as f:
            f.write(onx.SerializeToString())
        sess = InferenceSession(cache_name, so, providers=["CPUExecutionProvider"])
    onx_size = os.stat(cache_name).st_size

    # run once to avoid counting the first run
    bar.set_description(f"J={n_j} E={n_estimators} D={max_depth} predict1")
    rf.predict(X)
    sess.run(None, {"X": X})

    # fixed data
    obs = dict(
        n_jobs=n_j,
        max_depth=max_depth,
        n_estimators=n_estimators,
        repeat=repeat,
        max_time=max_time,
        name=rf.__class__.__name__,
        n_rows=X.shape[0],
        n_features=X.shape[1],
        onnx_size=onx_size,
    )

    # baseline
    bar.set_description(f"J={n_j} E={n_estimators} D={max_depth} predictB")
    r, t, mean, med = measure_inference(rf.predict, X, repeat=repeat, max_time=max_time)
    o1 = obs.copy()
    o1.update(dict(avg=mean, med=med, n_runs=r, ttime=t, name="base"))
    data.append(o1)

    # onnxruntime
    bar.set_description(f"J={n_j} E={n_estimators} D={max_depth} predictO")
    r, t, mean, med = measure_inference(
        lambda x, sess=sess: sess.run(None, {"X": x}),
        X,
        repeat=repeat,
        max_time=max_time,
    )
    o2 = obs.copy()
    o2.update(dict(avg=mean, med=med, n_runs=r, ttime=t, name="ort_"))
    data.append(o2)
J=1 E=10 D=10 predictO: 100%|██████████| 36/36 [00:09<00:00,  3.65it/s]

Saving data

name = os.path.join(cache_dir, "plot_beanchmark_rf")
print(f"Saving data into {name!r}")

df = pandas.DataFrame(data)
df2 = df.copy()
df2["legend"] = legend
df2.to_csv(f"{name}-{legend}.csv", index=False)
Saving data into '_cache/plot_beanchmark_rf'
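The CSV written above can be reloaded later for further analysis, for instance with the short sketch below (assuming the file exists on disk):

df_check = pandas.read_csv(f"{name}-{legend}.csv")  # name and legend are defined above
print(df_check.shape, list(df_check.columns))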

Printing the data

    n_jobs  max_depth  n_estimators  repeat  max_time  name  n_rows  n_features  onnx_size       avg       med  n_runs     ttime
 0      20          4            10       7         5  base    1000          10      11460  0.020402  0.019292       7  0.166615
 1      20          4            10       7         5  ort_    1000          10      11460  0.000432  0.000438       7  0.003177
 2      20          4            20       7         5  base    1000          10      22145  0.040205  0.042595       7  0.286761
 3      20          4            20       7         5  ort_    1000          10      22145  0.000992  0.000726       7  0.008394
 4      20          4            30       7         5  base    1000          10      32536  0.027468  0.028868       7  0.206444
 5      20          4            30       7         5  ort_    1000          10      32536  0.000506  0.000365       7  0.004230
 6      20          6            10       7         5  base    1000          10      34530  0.019679  0.017354       7  0.171011
 7      20          6            10       7         5  ort_    1000          10      34530  0.000103  0.000074       7  0.001878
 8      20          6            20       7         5  base    1000          10      66529  0.017820  0.017489       7  0.132340
 9      20          6            20       7         5  ort_    1000          10      66529  0.000156  0.000139       7  0.001469
10      20          6            30       7         5  base    1000          10     103420  0.022106  0.021207       7  0.158302
11      20          6            30       7         5  ort_    1000          10     103420  0.000321  0.000330       7  0.003994
12      20          8            10       7         5  base    1000          10      71350  0.030077  0.026227       7  0.217749
13      20          8            10       7         5  ort_    1000          10      71350  0.000284  0.000135       7  0.002352
14      20          8            20       7         5  base    1000          10     144167  0.024029  0.021634       7  0.200874
15      20          8            20       7         5  ort_    1000          10     144167  0.000488  0.000375       7  0.004292
16      20          8            30       7         5  base    1000          10     214110  0.044221  0.039483       7  0.318789
17      20          8            30       7         5  ort_    1000          10     214110  0.000797  0.000472       7  0.006995
18      20         10            10       7         5  base    1000          10     122048  0.028782  0.031307       7  0.197500
19      20         10            10       7         5  ort_    1000          10     122048  0.000159  0.000143       7  0.001353
20      20         10            20       7         5  base    1000          10     219442  0.038451  0.034949       7  0.271147
21      20         10            20       7         5  ort_    1000          10     219442  0.000247  0.000232       7  0.007190
22      20         10            30       7         5  base    1000          10     334072  0.021335  0.019029       7  0.169890
23      20         10            30       7         5  ort_    1000          10     334072  0.000316  0.000321       7  0.002366
24      10          4            10       7         5  base    1000          10      11679  0.017994  0.017969       7  0.125971
25      10          4            10       7         5  ort_    1000          10      11679  0.000219  0.000226       7  0.001612
26      10          4            20       7         5  base    1000          10      22656  0.017507  0.017571       7  0.130793
27      10          4            20       7         5  ort_    1000          10      22656  0.000192  0.000177       7  0.001623
28      10          4            30       7         5  base    1000          10      33412  0.029199  0.029476       7  0.203991
29      10          4            30       7         5  ort_    1000          10      33412  0.000310  0.000248       7  0.002301
30      10          6            10       7         5  base    1000          10      33727  0.017788  0.017760       7  0.123793
31      10          6            10       7         5  ort_    1000          10      33727  0.000181  0.000092       7  0.001484
32      10          6            20       7         5  base    1000          10      66894  0.025162  0.029113       7  0.174407
33      10          6            20       7         5  ort_    1000          10      66894  0.000245  0.000235       7  0.001975
34      10          6            30       7         5  base    1000          10     101960  0.030069  0.029910       7  0.211250
35      10          6            30       7         5  ort_    1000          10     101960  0.000275  0.000248       7  0.002105
36      10          8            10       7         5  base    1000          10      73532  0.018503  0.018575       7  0.129667
37      10          8            10       7         5  ort_    1000          10      73532  0.000219  0.000179       7  0.001621
38      10          8            20       7         5  base    1000          10     149551  0.021080  0.019247       7  0.153584
39      10          8            20       7         5  ort_    1000          10     149551  0.000212  0.000211       7  0.001832
40      10          8            30       7         5  base    1000          10     222210  0.029159  0.029022       7  0.205343
41      10          8            30       7         5  ort_    1000          10     222210  0.000406  0.000397       7  0.003140
42      10         10            10       7         5  base    1000          10     114799  0.018103  0.017797       7  0.128269
43      10         10            10       7         5  ort_    1000          10     114799  0.000224  0.000153       7  0.002583
44      10         10            20       7         5  base    1000          10     227708  0.017079  0.017073       7  0.125737
45      10         10            20       7         5  ort_    1000          10     227708  0.000316  0.000296       7  0.002357
46      10         10            30       7         5  base    1000          10     338925  0.028174  0.027690       7  0.197377
47      10         10            30       7         5  ort_    1000          10     338925  0.000530  0.000529       7  0.004071
48       1          4            10       7         5  base    1000          10      11168  0.000785  0.000712       7  0.005677
49       1          4            10       7         5  ort_    1000          10      11168  0.000258  0.000268       7  0.001803
50       1          6            10       7         5  base    1000          10      33216  0.000832  0.000769       7  0.005944
51       1          6            10       7         5  ort_    1000          10      33216  0.000321  0.000313       7  0.002284
52       1          8            10       7         5  base    1000          10      77275  0.001040  0.001066       7  0.007312
53       1          8            10       7         5  ort_    1000          10      77275  0.000456  0.000448       7  0.003227
54       1         10            10       7         5  base    1000          10     117218  0.001015  0.001011       7  0.007275
55       1         10            10       7         5  ort_    1000          10     117218  0.000531  0.000523       7  0.003769
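
A compact way to read this table is to compare, for each configuration, the average time of the scikit-learn baseline with the onnxruntime session. The sketch below is not part of the original script; it assumes df still holds the results shown above and mirrors the speedup computed in the plotting section:

piv = df.pivot_table(index=["n_jobs", "max_depth", "n_estimators"], columns="name", values="avg")
piv["speedup"] = piv["base"] / piv["ort_"]  # >1 means onnxruntime is faster
print(piv.sort_values("speedup", ascending=False).head())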


Plot

n_rows = len(n_jobs)
n_cols = len(n_ests)

fig, axes = plt.subplots(n_rows, n_cols, figsize=(4 * n_cols, 4 * n_rows))
fig.suptitle(f"{rf.__class__.__name__}\nX.shape={X.shape}")

for n_j, n_estimators in tqdm(product(n_jobs, n_ests)):
    i = n_jobs.index(n_j)
    j = n_ests.index(n_estimators)
    ax = axes[i, j]

    subdf = df[(df.n_estimators == n_estimators) & (df.n_jobs == n_j)]
    if subdf.shape[0] == 0:
        continue
    piv = subdf.pivot(index="max_depth", columns="name", values=["avg", "med"])
    piv.plot(ax=ax, title=f"jobs={n_j}, trees={n_estimators}")
    ax.set_ylabel(f"n_jobs={n_j}", fontsize="small")
    ax.set_xlabel("max_depth", fontsize="small")

    # ratio
    ax2 = ax.twinx()
    piv1 = subdf.pivot(index="max_depth", columns="name", values="avg")
    piv1["speedup"] = piv1.base / piv1.ort_
    ax2.plot(piv1.index, piv1.speedup, "b--", label="speedup avg")

    piv1 = subdf.pivot(index="max_depth", columns="name", values="med")
    piv1["speedup"] = piv1.base / piv1.ort_
    ax2.plot(piv1.index, piv1.speedup, "y--", label="speedup med")
    ax2.legend(fontsize="x-small")

    # 1
    ax2.plot(piv1.index, [1 for _ in piv1.index], "k--", label="no speedup")

for i in range(axes.shape[0]):
    for j in range(axes.shape[1]):
        axes[i, j].legend(fontsize="small")

fig.tight_layout()
fig.savefig(f"{name}-{legend}.png")
# plt.show()
[Figure: RandomForestRegressor, X.shape=(1000, 10); panels: jobs=20/trees=10, jobs=20/trees=20, jobs=20/trees=30, jobs=10/trees=10, jobs=10/trees=20, jobs=10/trees=30, jobs=1/trees=10]
9it [00:00, 37.54it/s]
~/github/onnx-array-api/_doc/examples/plot_benchmark_rf.py:307: UserWarning: No artists with labels found to put in legend.  Note that artists whose label start with an underscore are ignored when legend() is called with no argument.
  axes[i, j].legend(fontsize="small")

Total running time of the script: (0 minutes 13.510 seconds)

Gallery generated by Sphinx-Gallery

