diff --git a/TLE_to_train.py b/TLE_to_train.py
new file mode 100644
index 0000000..1a8d0f6
--- /dev/null
+++ b/TLE_to_train.py
@@ -0,0 +1,152 @@
+# Copyright 2020 IBM Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Standard libraries
+import os
+import logging
+import itertools
+import datetime as dt
+# Data processing libraries
+import numpy as np
+import pandas as pd
+from tqdm import tqdm
+from joblib import Parallel, delayed
+# Physics model
+from orbit_prediction import get_state_vect_cols
+from orbit_prediction.physics_model import PhysicsModel
+
+logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
+logger = logging.getLogger(__name__)
+
+
+def predict_orbit(window):
+    """Predict the state vectors of each future timestep in the given `window`
+    using a physics astrodynamics model.
+
+    :param window: The window of timesteps to predict the orbit of the ASO for
+    :type window: pandas.DataFrame
+
+    :return: The original timestep rows with the predicted state vectors added
+    :rtype: pandas.DataFrame
+    """
+    # The `window` DataFrame is reverse sorted by time so the starting position
+    # is the last row
+    start_row = window.iloc[-1]
+    start_epoch = start_row.name
+    # Get the column names of the state vector components
+    state_vect_comps = get_state_vect_cols()
+    # Extract the position and velocity vectors as a numpy array
+    start_state_vect = start_row[state_vect_comps].to_numpy()
+    start_state = np.concatenate((np.array([start_epoch]),
+                                  start_state_vect))
+    # Build an orbit model
+    orbit_model = PhysicsModel()
+    orbit_model.fit([start_state])
+    future_rows = window.iloc[:-1].reset_index()
+    # We add the epoch and the state vector components of the starting row
+    # to the rows we will use the physics model to make predictions for
+    future_rows['start_epoch'] = start_epoch
+    for svc in state_vect_comps:
+        future_rows[f'start_{svc}'] = start_row[svc]
+    # Calculate the elapsed time from the starting epoch to the
+    # epoch of all the rows to make predictions for
+    time_deltas = future_rows.epoch - future_rows.start_epoch
+    elapsed_seconds = time_deltas.dt.total_seconds()
+    future_rows['elapsed_seconds'] = elapsed_seconds
+    physics_cols = [f'physics_pred_{svc}' for svc in state_vect_comps]
+    # Predict the state vectors for each of the rows in the "future"
+    predicted_orbits = orbit_model.predict([elapsed_seconds.to_numpy()])
+    try:
+        future_rows[physics_cols] = predicted_orbits[0]
+    except Exception as ex:
+        logger.error(f'Physics prediction failed for window starting at {start_epoch}: {ex}')
+    return future_rows
+
+
+def predict_orbits(df, last_n_days, n_pred_days):
+    """Use a physics astrodynamics model to predict the orbits of the ASOs
+    in the provided DataFrame.
+
+    :param df: The DataFrame containing the observed orbital state vectors
+        to use to make predictions from
+    :type df: pandas.DataFrame
+
+    :param last_n_days: Filter the DataFrame to use rows from only the last
+        `n` days.
+        Use all the rows if `None` is passed, but this may take a
+        very long time to run
+    :type last_n_days: int
+
+    :param n_pred_days: The number of days in the rolling prediction window
+    :type n_pred_days: int
+    """
+    if last_n_days:
+        time_cutoff = df.epoch.max() - dt.timedelta(days=last_n_days)
+        df = df[df.epoch >= time_cutoff]
+    epoch_df = df.sort_values('epoch', ascending=False).set_index('epoch')
+    pred_window_length = f'{n_pred_days}d'
+    # For each row in `df` we create a window of all of the observations for
+    # that ASO that are within `n_pred_days` of the given row
+    window_cols = ['aso_id', pd.Grouper(freq=pred_window_length)]
+    windows = [w[1] for w in epoch_df.groupby(window_cols)]
+    # Predict the orbits in each window in parallel
+    window_dfs = Parallel(n_jobs=-1)(delayed(predict_orbit)(w)
+                                     for w in tqdm(windows))
+    # Join all of the window prediction DataFrames into a single DataFrame
+    physics_pred_df = pd.concat(window_dfs).reset_index(drop=True)
+    return physics_pred_df
+
+
+def calc_physics_error(df):
+    """Calculates the error in the state vector components between the ground truth
+    observations and the physics model predictions.
+
+    :param df: The DataFrame containing the ground truth observations and the
+        physics model predictions
+    :type df: pandas.DataFrame
+
+    :return: The input DataFrame with the physics model error columns added
+    :rtype: pandas.DataFrame
+    """
+    comps = ['x', 'y', 'z']
+    vects = ['r', 'v']
+    for vect, comp in itertools.product(vects, comps):
+        comp_col = f'{vect}_{comp}'
+        err_col = f'physics_err_{comp_col}'
+        err_val = df[f'physics_pred_{comp_col}'] - df[comp_col]
+        df[err_col] = err_val
+    return df
+
+
+def run():
+    """Builds a training data set of physics model errors from the
+    TLE-derived orbit observations.
+
+    Input and output file paths are currently hard-coded below rather
+    than supplied via command line arguments.
+    """
+    logger.info('Loading input DataFrame...')
+    input_df = pd.read_parquet("/home/lj020/Downloads/data6.parquet")
+    logger.info('Predicting orbits...')
+    physics_pred_df = predict_orbits(input_df,
+                                     last_n_days=None,
+                                     n_pred_days=5)
+    logger.info('Calculating physical model error...')
+    physics_pred_df = calc_physics_error(physics_pred_df)
+    logger.info('Serializing results...')
+    physics_pred_df.to_parquet("/home/lj020/Downloads/train_result.parquet")
+
+    with open("/home/lj020/Downloads/train_result.txt","w") as f:
+        f.write(physics_pred_df.to_string())
+
+run()
\ No newline at end of file
diff --git a/TLEdata_extract.py b/TLEdata_extract.py
new file mode 100644
index 0000000..c010143
--- /dev/null
+++ b/TLEdata_extract.py
@@ -0,0 +1,72 @@
+from tletools import TLE
+import pandas as pd
+from astropy import units as u
+
+
+def get_tles(raw_tle_str):
+    """Parses the raw TLE string and converts it to TLE objects.
+
+    :param raw_tle_str: The raw string form of the TLEs
+    :type raw_tle_str: str
+
+    :return: The parsed object representations of the TLEs
+    :rtype: [tletools.TLE]
+    """
+    cutnum = 2
+    all_tle_lines = raw_tle_str.strip().splitlines()
+    tles = []
+    for i in range(len(all_tle_lines)//cutnum):
+        # Calculate the offset of this TLE's first line
+        j = i*cutnum
+        tle_lines = ["noaa-06"] + all_tle_lines[j:j+cutnum]
+        # print(tle_lines)
+        # Strip line number from object name line
+        # tle_lines[0] = tle_lines[0][2:]
+        tle = TLE.from_lines(*tle_lines)
+        tles.append(tle)
+    # print(tles)
+    return tles
+
+
+def get_aso_data(tles):
+    """Extracts the necessary data from the TLE objects for doing orbital
+    prediction.
+ + :param tles: The list of TLE objects to extract orbit information from + :type tles: [tletools.TLE] + + :return: A DataFrame of the extracted TLE data + :rtype: pandas.DataFrame + """ + tles_data = [] + for tle in tles: + aso_data = {} + aso_data['aso_name'] = tle.name + aso_data['aso_id'] = tle.norad + aso_data['epoch'] = tle.epoch.to_datetime() + # Convert the TLE object to a poliastro.twobody.Orbit instance + orbit = tle.to_orbit() + # Calculate the position and velocity vectors + r, v = orbit.rv() + # Convert position vector from kilometers to meters + r_m = r.to(u.m).to_value() + # Convert the velocity vector from km/s to m/s + v_ms = v.to(u.m/u.s).to_value() + # Extract the components of the state vectiors + aso_data['r_x'], aso_data['r_y'], aso_data['r_z'] = r_m + aso_data['v_x'], aso_data['v_y'], aso_data['v_z'] = v_ms + tles_data.append(aso_data) + return pd.DataFrame(tles_data) + + +full_tle = [] +with open("/home/lj020/Downloads/noaa.txt","r") as f: + raw_tle = f.read() + l = get_tles(raw_tle_str=raw_tle) + full_tle += l + +w = get_aso_data(full_tle) +w.to_parquet("/home/lj020/Downloads/data6.parquet") +with open("/home/lj020/Downloads/data6.txt","w") as f: + f.write(w.to_string()) +# print(w) diff --git a/catboost_info/catboost_training.json b/catboost_info/catboost_training.json index e118341..7cd14a0 100644 --- a/catboost_info/catboost_training.json +++ b/catboost_info/catboost_training.json @@ -1,1004 +1,1004 @@ { "meta":{"test_sets":[],"test_metrics":[],"learn_metrics":[{"best_value":"Min","name":"RMSE"}],"launch_mode":"Train","parameters":"","iteration_count":1000,"learn_sets":["learn"],"name":"experiment"}, "iterations":[ -{"learn":[3662.575683],"iteration":0,"passed_time":0.001048224102,"remaining_time":1.047175878}, -{"learn":[3555.636322],"iteration":1,"passed_time":0.002035863078,"remaining_time":1.015895676}, -{"learn":[3450.779167],"iteration":2,"passed_time":0.002903879778,"remaining_time":0.9650560462}, -{"learn":[3350.517801],"iteration":3,"passed_time":0.003862126061,"remaining_time":0.9616693892}, -{"learn":[3255.662239],"iteration":4,"passed_time":0.004641087248,"remaining_time":0.9235763624}, -{"learn":[3168.341562],"iteration":5,"passed_time":0.005337226138,"remaining_time":0.8842004635}, -{"learn":[3083.09696],"iteration":6,"passed_time":0.006049819452,"remaining_time":0.8582101022}, -{"learn":[3001.252663],"iteration":7,"passed_time":0.006773348314,"remaining_time":0.839895191}, -{"learn":[2917.435051],"iteration":8,"passed_time":0.007520144908,"remaining_time":0.8280515115}, -{"learn":[2837.145765],"iteration":9,"passed_time":0.008287206221,"remaining_time":0.8204334159}, -{"learn":[2766.450459],"iteration":10,"passed_time":0.009105048449,"remaining_time":0.8186266287}, -{"learn":[2688.914783],"iteration":11,"passed_time":0.009847723869,"remaining_time":0.8107959319}, -{"learn":[2614.472366],"iteration":12,"passed_time":0.01053569021,"remaining_time":0.7999020182}, -{"learn":[2548.570089],"iteration":13,"passed_time":0.01128033416,"remaining_time":0.7944578204}, -{"learn":[2490.007404],"iteration":14,"passed_time":0.011977249,"remaining_time":0.7865060177}, -{"learn":[2428.190922],"iteration":15,"passed_time":0.01267087238,"remaining_time":0.7792586516}, -{"learn":[2363.192483],"iteration":16,"passed_time":0.01340637092,"remaining_time":0.775203683}, -{"learn":[2309.161851],"iteration":17,"passed_time":0.01411464756,"remaining_time":0.7700324389}, -{"learn":[2251.326317],"iteration":18,"passed_time":0.01481760581,"remaining_time":0.765056384}, 
-{"learn":[2194.518603],"iteration":19,"passed_time":0.01549458959,"remaining_time":0.7592348901}, -{"learn":[2134.386896],"iteration":20,"passed_time":0.01644906333,"remaining_time":0.7668396665}, -{"learn":[2077.449324],"iteration":21,"passed_time":0.01732082338,"remaining_time":0.7699893301}, -{"learn":[2027.220285],"iteration":22,"passed_time":0.01816675104,"remaining_time":0.77169199}, -{"learn":[1984.605605],"iteration":23,"passed_time":0.01895777912,"remaining_time":0.7709496841}, -{"learn":[1931.101868],"iteration":24,"passed_time":0.01974026714,"remaining_time":0.7698704186}, -{"learn":[1893.116137],"iteration":25,"passed_time":0.02052408699,"remaining_time":0.7688638742}, -{"learn":[1842.931837],"iteration":26,"passed_time":0.0212543615,"remaining_time":0.7659442126}, -{"learn":[1795.250205],"iteration":27,"passed_time":0.02195648149,"remaining_time":0.7622035716}, -{"learn":[1751.063766],"iteration":28,"passed_time":0.02268154978,"remaining_time":0.7594408564}, -{"learn":[1712.884824],"iteration":29,"passed_time":0.02341985013,"remaining_time":0.7572418208}, -{"learn":[1667.80013],"iteration":30,"passed_time":0.02412037335,"remaining_time":0.7539561863}, -{"learn":[1630.782543],"iteration":31,"passed_time":0.0248340193,"remaining_time":0.7512290839}, -{"learn":[1590.474703],"iteration":32,"passed_time":0.02558644587,"remaining_time":0.7497603988}, -{"learn":[1551.389736],"iteration":33,"passed_time":0.02633511307,"remaining_time":0.7482270362}, -{"learn":[1516.435531],"iteration":34,"passed_time":0.02701407608,"remaining_time":0.744816669}, -{"learn":[1484.889881],"iteration":35,"passed_time":0.02773222138,"remaining_time":0.7426072615}, -{"learn":[1452.681436],"iteration":36,"passed_time":0.02922225711,"remaining_time":0.7605684755}, -{"learn":[1419.998505],"iteration":37,"passed_time":0.02993859448,"remaining_time":0.757919155}, -{"learn":[1389.412263],"iteration":38,"passed_time":0.03066495293,"remaining_time":0.7556158915}, -{"learn":[1363.812251],"iteration":39,"passed_time":0.03156180218,"remaining_time":0.7574832522}, -{"learn":[1340.371116],"iteration":40,"passed_time":0.03238396535,"remaining_time":0.7574688481}, -{"learn":[1309.517514],"iteration":41,"passed_time":0.03325333631,"remaining_time":0.7584927663}, -{"learn":[1275.195616],"iteration":42,"passed_time":0.03400256544,"remaining_time":0.7567547703}, -{"learn":[1246.278496],"iteration":43,"passed_time":0.03486340917,"remaining_time":0.7574867992}, -{"learn":[1218.451233],"iteration":44,"passed_time":0.03552797424,"remaining_time":0.7539825644}, -{"learn":[1198.54566],"iteration":45,"passed_time":0.03633034132,"remaining_time":0.7534596875}, -{"learn":[1176.814775],"iteration":46,"passed_time":0.03704876297,"remaining_time":0.7512227895}, -{"learn":[1153.401899],"iteration":47,"passed_time":0.03778597756,"remaining_time":0.7494218882}, -{"learn":[1129.163213],"iteration":48,"passed_time":0.03849158877,"remaining_time":0.7470510392}, -{"learn":[1106.535124],"iteration":49,"passed_time":0.03923882159,"remaining_time":0.7455376102}, -{"learn":[1080.936692],"iteration":50,"passed_time":0.03996599195,"remaining_time":0.7436809091}, -{"learn":[1064.986137],"iteration":51,"passed_time":0.0406713514,"remaining_time":0.7414700217}, -{"learn":[1045.057952],"iteration":52,"passed_time":0.0413358652,"remaining_time":0.7385861196}, -{"learn":[1025.324028],"iteration":53,"passed_time":0.04207274968,"remaining_time":0.7370522444}, -{"learn":[1006.478968],"iteration":54,"passed_time":0.04277837514,"remaining_time":0.7350102637}, 
-{"learn":[987.1865749],"iteration":55,"passed_time":0.04349747337,"remaining_time":0.7332431226}, -{"learn":[975.2369814],"iteration":56,"passed_time":0.04425939328,"remaining_time":0.7322211906}, -{"learn":[955.5744159],"iteration":57,"passed_time":0.04501491118,"remaining_time":0.731104247}, -{"learn":[941.1777923],"iteration":58,"passed_time":0.04572484441,"remaining_time":0.7292725184}, -{"learn":[926.5090454],"iteration":59,"passed_time":0.04642100466,"remaining_time":0.7272624064}, -{"learn":[912.6126438],"iteration":60,"passed_time":0.04741235387,"remaining_time":0.7298393489}, -{"learn":[895.292879],"iteration":61,"passed_time":0.04832959069,"remaining_time":0.7311799365}, -{"learn":[878.3101311],"iteration":62,"passed_time":0.04915864551,"remaining_time":0.731137315}, -{"learn":[862.0548494],"iteration":63,"passed_time":0.05011315735,"remaining_time":0.7329049262}, -{"learn":[846.6412955],"iteration":64,"passed_time":0.05082763765,"remaining_time":0.7311360185}, -{"learn":[831.7313933],"iteration":65,"passed_time":0.05158707659,"remaining_time":0.730035296}, -{"learn":[816.1886926],"iteration":66,"passed_time":0.05234155289,"remaining_time":0.7288756544}, -{"learn":[804.041348],"iteration":67,"passed_time":0.05307378596,"remaining_time":0.7274230664}, -{"learn":[790.9289148],"iteration":68,"passed_time":0.05382215012,"remaining_time":0.726209011}, -{"learn":[780.494202],"iteration":69,"passed_time":0.05456257497,"remaining_time":0.7249027818}, -{"learn":[765.6299333],"iteration":70,"passed_time":0.05531889018,"remaining_time":0.7238204082}, -{"learn":[754.0364757],"iteration":71,"passed_time":0.05601013516,"remaining_time":0.7219084087}, -{"learn":[742.6843646],"iteration":72,"passed_time":0.05670059136,"remaining_time":0.7200198383}, -{"learn":[733.6486191],"iteration":73,"passed_time":0.05758347356,"remaining_time":0.7205715745}, -{"learn":[724.4082592],"iteration":74,"passed_time":0.05829821346,"remaining_time":0.7190112993}, -{"learn":[714.3612708],"iteration":75,"passed_time":0.05902747165,"remaining_time":0.7176497869}, -{"learn":[703.7413048],"iteration":76,"passed_time":0.05976955167,"remaining_time":0.7164583921}, -{"learn":[692.6766437],"iteration":77,"passed_time":0.06045693899,"remaining_time":0.7146320224}, -{"learn":[682.658587],"iteration":78,"passed_time":0.06116548947,"remaining_time":0.7130812127}, -{"learn":[672.2667398],"iteration":79,"passed_time":0.06194553036,"remaining_time":0.7123735991}, -{"learn":[662.2697796],"iteration":80,"passed_time":0.06277600995,"remaining_time":0.7122364586}, -{"learn":[653.6006303],"iteration":81,"passed_time":0.06359903311,"remaining_time":0.7119989317}, -{"learn":[647.5304061],"iteration":82,"passed_time":0.06463637793,"remaining_time":0.7141151634}, -{"learn":[640.5592447],"iteration":83,"passed_time":0.06550222134,"remaining_time":0.7142861279}, -{"learn":[630.0335231],"iteration":84,"passed_time":0.06639667009,"remaining_time":0.7147406251}, -{"learn":[625.4874873],"iteration":85,"passed_time":0.0671161661,"remaining_time":0.7133043699}, -{"learn":[617.861395],"iteration":86,"passed_time":0.06785444828,"remaining_time":0.7120817389}, -{"learn":[611.4852683],"iteration":87,"passed_time":0.06854632605,"remaining_time":0.7103891972}, -{"learn":[604.7049078],"iteration":88,"passed_time":0.06928889464,"remaining_time":0.7092380114}, -{"learn":[599.1922856],"iteration":89,"passed_time":0.07000813995,"remaining_time":0.7078600817}, -{"learn":[591.5441643],"iteration":90,"passed_time":0.07070041228,"remaining_time":0.7062271952}, 
-{"learn":[585.3771005],"iteration":91,"passed_time":0.07143795733,"remaining_time":0.7050615789}, -{"learn":[579.0166045],"iteration":92,"passed_time":0.07212528945,"remaining_time":0.7034154573}, -{"learn":[571.6345746],"iteration":93,"passed_time":0.07284266914,"remaining_time":0.702079343}, -{"learn":[563.7088364],"iteration":94,"passed_time":0.07353363671,"remaining_time":0.7005046444}, -{"learn":[558.8177793],"iteration":95,"passed_time":0.0742609018,"remaining_time":0.6992901586}, -{"learn":[552.8642371],"iteration":96,"passed_time":0.07496849045,"remaining_time":0.6979025451}, -{"learn":[545.7509079],"iteration":97,"passed_time":0.07572013466,"remaining_time":0.6969343007}, -{"learn":[539.9432238],"iteration":98,"passed_time":0.07642319369,"remaining_time":0.6955282577}, -{"learn":[534.1401375],"iteration":99,"passed_time":0.07713626988,"remaining_time":0.6942264289}, -{"learn":[527.7113585],"iteration":100,"passed_time":0.07787193222,"remaining_time":0.6931372977}, -{"learn":[523.0913521],"iteration":101,"passed_time":0.08010386955,"remaining_time":0.7052281849}, -{"learn":[518.1808209],"iteration":102,"passed_time":0.08095907155,"remaining_time":0.7050513319}, -{"learn":[515.1524114],"iteration":103,"passed_time":0.0818226238,"remaining_time":0.7049333742}, -{"learn":[510.9040838],"iteration":104,"passed_time":0.08256788025,"remaining_time":0.703792884}, -{"learn":[507.4108072],"iteration":105,"passed_time":0.08334949084,"remaining_time":0.7029664604}, -{"learn":[503.8051705],"iteration":106,"passed_time":0.0841848882,"remaining_time":0.7025897679}, -{"learn":[499.9996939],"iteration":107,"passed_time":0.08489968899,"remaining_time":0.7012085424}, -{"learn":[494.3140036],"iteration":108,"passed_time":0.08562285641,"remaining_time":0.6999079364}, -{"learn":[490.6963987],"iteration":109,"passed_time":0.0863491351,"remaining_time":0.6986430021}, -{"learn":[486.4031605],"iteration":110,"passed_time":0.08703576819,"remaining_time":0.6970702515}, -{"learn":[482.2785595],"iteration":111,"passed_time":0.0877364278,"remaining_time":0.6956245347}, -{"learn":[479.8949034],"iteration":112,"passed_time":0.08842896436,"remaining_time":0.6941282423}, -{"learn":[477.481256],"iteration":113,"passed_time":0.08909897138,"remaining_time":0.692470953}, -{"learn":[473.4848601],"iteration":114,"passed_time":0.08993526648,"remaining_time":0.692110529}, -{"learn":[470.8859057],"iteration":115,"passed_time":0.09073123441,"remaining_time":0.6914345795}, -{"learn":[467.34591],"iteration":116,"passed_time":0.09148530154,"remaining_time":0.6904403527}, -{"learn":[461.7794319],"iteration":117,"passed_time":0.09219658974,"remaining_time":0.689130442}, -{"learn":[458.8907088],"iteration":118,"passed_time":0.09289395398,"remaining_time":0.687727508}, -{"learn":[455.6145374],"iteration":119,"passed_time":0.09375794067,"remaining_time":0.6875582316}, -{"learn":[452.7000667],"iteration":120,"passed_time":0.09461301127,"remaining_time":0.6873127017}, -{"learn":[449.6290258],"iteration":121,"passed_time":0.09550207113,"remaining_time":0.6873017906}, -{"learn":[446.554963],"iteration":122,"passed_time":0.09645355147,"remaining_time":0.6877216637}, -{"learn":[444.1306578],"iteration":123,"passed_time":0.09718133433,"remaining_time":0.6865391038}, -{"learn":[442.339996],"iteration":124,"passed_time":0.09790930877,"remaining_time":0.6853651614}, -{"learn":[438.6627661],"iteration":125,"passed_time":0.09861097045,"remaining_time":0.6840157792}, 
-{"learn":[436.8974745],"iteration":126,"passed_time":0.09926481186,"remaining_time":0.6823478799}, -{"learn":[435.2446235],"iteration":127,"passed_time":0.09996990281,"remaining_time":0.6810449629}, -{"learn":[433.1007063],"iteration":128,"passed_time":0.1013340916,"remaining_time":0.6842015021}, -{"learn":[428.7936747],"iteration":129,"passed_time":0.1021133013,"remaining_time":0.683373632}, -{"learn":[425.4447815],"iteration":130,"passed_time":0.1028054622,"remaining_time":0.6819690584}, -{"learn":[422.4564141],"iteration":131,"passed_time":0.1035430461,"remaining_time":0.6808739696}, -{"learn":[420.3704177],"iteration":132,"passed_time":0.1042900125,"remaining_time":0.67984542}, -{"learn":[418.3308567],"iteration":133,"passed_time":0.1050084156,"remaining_time":0.6786364772}, -{"learn":[416.355286],"iteration":134,"passed_time":0.1057192836,"remaining_time":0.6773865211}, -{"learn":[413.2683151],"iteration":135,"passed_time":0.1064474661,"remaining_time":0.6762544902}, -{"learn":[409.2783872],"iteration":136,"passed_time":0.1071312215,"remaining_time":0.6748484974}, -{"learn":[407.2469786],"iteration":137,"passed_time":0.1078246622,"remaining_time":0.6735134696}, -{"learn":[404.9461056],"iteration":138,"passed_time":0.1087250237,"remaining_time":0.6734693912}, -{"learn":[402.902743],"iteration":139,"passed_time":0.1095788689,"remaining_time":0.6731273376}, -{"learn":[401.1939857],"iteration":140,"passed_time":0.1104375853,"remaining_time":0.6728077005}, -{"learn":[399.2151519],"iteration":141,"passed_time":0.1113094682,"remaining_time":0.6725600261}, -{"learn":[396.7982124],"iteration":142,"passed_time":0.1121055707,"remaining_time":0.6718494693}, -{"learn":[395.1551058],"iteration":143,"passed_time":0.1130211107,"remaining_time":0.6718477137}, -{"learn":[393.8379304],"iteration":144,"passed_time":0.1139175889,"remaining_time":0.6717209552}, -{"learn":[392.1757867],"iteration":145,"passed_time":0.115114552,"remaining_time":0.6733412836}, -{"learn":[390.0361905],"iteration":146,"passed_time":0.1158802594,"remaining_time":0.672420825}, -{"learn":[387.759463],"iteration":147,"passed_time":0.1168136102,"remaining_time":0.6724675399}, -{"learn":[385.5603947],"iteration":148,"passed_time":0.1176591074,"remaining_time":0.6719993313}, -{"learn":[383.4831287],"iteration":149,"passed_time":0.1184101035,"remaining_time":0.6709905863}, -{"learn":[380.7027889],"iteration":150,"passed_time":0.1191952897,"remaining_time":0.6701774896}, -{"learn":[379.1509263],"iteration":151,"passed_time":0.1199109166,"remaining_time":0.6689766927}, -{"learn":[377.5054091],"iteration":152,"passed_time":0.1206430913,"remaining_time":0.6678738453}, -{"learn":[376.3544887],"iteration":153,"passed_time":0.1213932363,"remaining_time":0.666874532}, -{"learn":[374.7216306],"iteration":154,"passed_time":0.122091681,"remaining_time":0.6655965834}, -{"learn":[371.8227673],"iteration":155,"passed_time":0.1228154776,"remaining_time":0.6644632251}, -{"learn":[368.8293011],"iteration":156,"passed_time":0.1234830268,"remaining_time":0.6630330677}, -{"learn":[367.3554223],"iteration":157,"passed_time":0.124250894,"remaining_time":0.6621471693}, -{"learn":[365.6957606],"iteration":158,"passed_time":0.125082188,"remaining_time":0.6615982397}, -{"learn":[363.4291725],"iteration":159,"passed_time":0.1259223265,"remaining_time":0.6610922142}, -{"learn":[362.1293855],"iteration":160,"passed_time":0.1267673646,"remaining_time":0.660607571}, -{"learn":[359.8814575],"iteration":161,"passed_time":0.1275243835,"remaining_time":0.659663169}, 
-{"learn":[357.4070103],"iteration":162,"passed_time":0.1282479928,"remaining_time":0.6585495093}, -{"learn":[355.2431096],"iteration":163,"passed_time":0.128942859,"remaining_time":0.6572940863}, -{"learn":[352.552065],"iteration":164,"passed_time":0.1297510463,"remaining_time":0.656618931}, -{"learn":[349.7138766],"iteration":165,"passed_time":0.1305558509,"remaining_time":0.6559251784}, -{"learn":[347.4891863],"iteration":166,"passed_time":0.1312337274,"remaining_time":0.6545969756}, -{"learn":[345.5489662],"iteration":167,"passed_time":0.1318980215,"remaining_time":0.6532092492}, -{"learn":[341.5699121],"iteration":168,"passed_time":0.1326245384,"remaining_time":0.6521360438}, -{"learn":[338.2907206],"iteration":169,"passed_time":0.1333184203,"remaining_time":0.6509075815}, -{"learn":[336.0732474],"iteration":170,"passed_time":0.1339855967,"remaining_time":0.6495559043}, -{"learn":[334.6253265],"iteration":171,"passed_time":0.1346695971,"remaining_time":0.6482931767}, -{"learn":[333.6199762],"iteration":172,"passed_time":0.1353404776,"remaining_time":0.646974422}, -{"learn":[331.927306],"iteration":173,"passed_time":0.1360180216,"remaining_time":0.6456947461}, -{"learn":[330.7664346],"iteration":174,"passed_time":0.1367687727,"remaining_time":0.6447670713}, -{"learn":[327.600505],"iteration":175,"passed_time":0.1374761968,"remaining_time":0.6436385578}, -{"learn":[325.6325653],"iteration":176,"passed_time":0.13820997,"remaining_time":0.6426373183}, -{"learn":[324.3582431],"iteration":177,"passed_time":0.138919102,"remaining_time":0.6415252914}, -{"learn":[323.0986648],"iteration":178,"passed_time":0.1398843165,"remaining_time":0.6415923121}, -{"learn":[321.3015869],"iteration":179,"passed_time":0.140964175,"remaining_time":0.6421701305}, -{"learn":[319.9619277],"iteration":180,"passed_time":0.1418238739,"remaining_time":0.6417334402}, -{"learn":[318.5357961],"iteration":181,"passed_time":0.1426653755,"remaining_time":0.6412103142}, -{"learn":[317.3093454],"iteration":182,"passed_time":0.1433921634,"remaining_time":0.6401715712}, -{"learn":[315.6279442],"iteration":183,"passed_time":0.1442559361,"remaining_time":0.6397437166}, -{"learn":[313.5985253],"iteration":184,"passed_time":0.1449686149,"remaining_time":0.6386455196}, -{"learn":[312.2128824],"iteration":185,"passed_time":0.1456340086,"remaining_time":0.6373445323}, -{"learn":[310.5408064],"iteration":186,"passed_time":0.1463235927,"remaining_time":0.6361555127}, -{"learn":[308.8097527],"iteration":187,"passed_time":0.1469707465,"remaining_time":0.6347885435}, -{"learn":[307.444303],"iteration":188,"passed_time":0.1476244587,"remaining_time":0.6334573332}, -{"learn":[305.5703591],"iteration":189,"passed_time":0.1483259601,"remaining_time":0.6323369878}, -{"learn":[304.1410151],"iteration":190,"passed_time":0.1498676659,"remaining_time":0.6347797996}, -{"learn":[302.5345375],"iteration":191,"passed_time":0.150515814,"remaining_time":0.6334207171}, -{"learn":[301.645134],"iteration":192,"passed_time":0.1511993393,"remaining_time":0.6322169267}, -{"learn":[300.4121176],"iteration":193,"passed_time":0.1518983587,"remaining_time":0.6310828719}, -{"learn":[298.5525362],"iteration":194,"passed_time":0.1525351389,"remaining_time":0.6296963427}, -{"learn":[296.9420221],"iteration":195,"passed_time":0.1532716157,"remaining_time":0.6287264235}, -{"learn":[295.9578764],"iteration":196,"passed_time":0.1540105047,"remaining_time":0.6277687068}, -{"learn":[295.0625029],"iteration":197,"passed_time":0.1548134778,"remaining_time":0.6270727738}, 
-{"learn":[294.1030487],"iteration":198,"passed_time":0.1555906158,"remaining_time":0.626271775}, -{"learn":[293.6409424],"iteration":199,"passed_time":0.1561267846,"remaining_time":0.6245071382}, -{"learn":[292.418178],"iteration":200,"passed_time":0.1571125661,"remaining_time":0.6245419916}, -{"learn":[290.7077158],"iteration":201,"passed_time":0.1583754466,"remaining_time":0.6256614177}, -{"learn":[289.7255389],"iteration":202,"passed_time":0.1590543633,"remaining_time":0.6244646678}, -{"learn":[288.2508488],"iteration":203,"passed_time":0.1598003373,"remaining_time":0.6235346495}, -{"learn":[286.7868578],"iteration":204,"passed_time":0.1605520332,"remaining_time":0.6226286164}, -{"learn":[286.086537],"iteration":205,"passed_time":0.161274874,"remaining_time":0.621612864}, -{"learn":[285.2264005],"iteration":206,"passed_time":0.1619637449,"remaining_time":0.6204698052}, -{"learn":[284.0864334],"iteration":207,"passed_time":0.1626876804,"remaining_time":0.6194646292}, -{"learn":[282.8510716],"iteration":208,"passed_time":0.1634216694,"remaining_time":0.6185001938}, -{"learn":[282.0865003],"iteration":209,"passed_time":0.1641738645,"remaining_time":0.6176064427}, -{"learn":[281.1298771],"iteration":210,"passed_time":0.164925862,"remaining_time":0.6167132943}, -{"learn":[280.1903001],"iteration":211,"passed_time":0.1655857375,"remaining_time":0.6154790622}, -{"learn":[279.0970471],"iteration":212,"passed_time":0.1663062143,"remaining_time":0.6144741344}, -{"learn":[277.9367672],"iteration":213,"passed_time":0.1669812131,"remaining_time":0.6133048295}, -{"learn":[276.7751341],"iteration":214,"passed_time":0.167689421,"remaining_time":0.6122613745}, -{"learn":[275.9816879],"iteration":215,"passed_time":0.1683824116,"remaining_time":0.6111657903}, -{"learn":[275.0941078],"iteration":216,"passed_time":0.1691116563,"remaining_time":0.6102047321}, -{"learn":[274.0984843],"iteration":217,"passed_time":0.1697642068,"remaining_time":0.6089706868}, -{"learn":[273.783232],"iteration":218,"passed_time":0.1705968027,"remaining_time":0.6083840316}, -{"learn":[272.2569337],"iteration":219,"passed_time":0.1714172741,"remaining_time":0.6077521535}, -{"learn":[271.4275366],"iteration":220,"passed_time":0.1721732884,"remaining_time":0.6068913649}, -{"learn":[268.932575],"iteration":221,"passed_time":0.1731690055,"remaining_time":0.6068715599}, -{"learn":[267.6229378],"iteration":222,"passed_time":0.1739672201,"remaining_time":0.6061548431}, -{"learn":[266.5785284],"iteration":223,"passed_time":0.1746886305,"remaining_time":0.6051713271}, -{"learn":[265.398546],"iteration":224,"passed_time":0.1754056733,"remaining_time":0.604175097}, -{"learn":[263.8963923],"iteration":225,"passed_time":0.1761441122,"remaining_time":0.6032546143}, -{"learn":[262.8240575],"iteration":226,"passed_time":0.1769197762,"remaining_time":0.6024624979}, -{"learn":[261.7816442],"iteration":227,"passed_time":0.1776571386,"remaining_time":0.6015408377}, -{"learn":[260.8591568],"iteration":228,"passed_time":0.1785285287,"remaining_time":0.6010720332}, -{"learn":[259.7074649],"iteration":229,"passed_time":0.1794276513,"remaining_time":0.6006925716}, -{"learn":[258.3753881],"iteration":230,"passed_time":0.1803341217,"remaining_time":0.6003330717}, -{"learn":[257.5815782],"iteration":231,"passed_time":0.181110155,"remaining_time":0.5995370648}, -{"learn":[256.6962939],"iteration":232,"passed_time":0.1819193703,"remaining_time":0.5988504592}, -{"learn":[255.5210063],"iteration":233,"passed_time":0.1826910584,"remaining_time":0.5980399605}, 
-{"learn":[254.8841884],"iteration":234,"passed_time":0.1835126522,"remaining_time":0.5973922508}, -{"learn":[254.0187639],"iteration":235,"passed_time":0.1843096525,"remaining_time":0.5966634512}, -{"learn":[252.9263803],"iteration":236,"passed_time":0.185212695,"remaining_time":0.5962754697}, -{"learn":[251.9325468],"iteration":237,"passed_time":0.1861036369,"remaining_time":0.5958444173}, -{"learn":[250.643096],"iteration":238,"passed_time":0.1870425472,"remaining_time":0.5955622527}, -{"learn":[249.6721803],"iteration":239,"passed_time":0.187933285,"remaining_time":0.5951220691}, -{"learn":[248.7723228],"iteration":240,"passed_time":0.1887522215,"remaining_time":0.5944520172}, -{"learn":[248.0043812],"iteration":241,"passed_time":0.1896254224,"remaining_time":0.5939507031}, -{"learn":[247.3321548],"iteration":242,"passed_time":0.1904597995,"remaining_time":0.5933253836}, -{"learn":[246.7967875],"iteration":243,"passed_time":0.1912322949,"remaining_time":0.5925066187}, -{"learn":[246.0910573],"iteration":244,"passed_time":0.1919510759,"remaining_time":0.5915227032}, -{"learn":[243.8962219],"iteration":245,"passed_time":0.1929246549,"remaining_time":0.5913219096}, -{"learn":[242.8598926],"iteration":246,"passed_time":0.1937343441,"remaining_time":0.5906152272}, -{"learn":[242.4047449],"iteration":247,"passed_time":0.1945199837,"remaining_time":0.5898347892}, -{"learn":[241.8658878],"iteration":248,"passed_time":0.1953305322,"remaining_time":0.5891294365}, -{"learn":[241.2068841],"iteration":249,"passed_time":0.1961082018,"remaining_time":0.5883246054}, -{"learn":[240.3944328],"iteration":250,"passed_time":0.1970050218,"remaining_time":0.5878755433}, -{"learn":[239.8042636],"iteration":251,"passed_time":0.1977707763,"remaining_time":0.5870338914}, -{"learn":[239.2380851],"iteration":252,"passed_time":0.198508332,"remaining_time":0.586109581}, -{"learn":[238.7913906],"iteration":253,"passed_time":0.1992780534,"remaining_time":0.5852812119}, -{"learn":[237.7871852],"iteration":254,"passed_time":0.2001830495,"remaining_time":0.5848485173}, -{"learn":[237.4230529],"iteration":255,"passed_time":0.2014230334,"remaining_time":0.5853856907}, -{"learn":[236.9503851],"iteration":256,"passed_time":0.2024117442,"remaining_time":0.5851825912}, -{"learn":[236.5353735],"iteration":257,"passed_time":0.203289636,"remaining_time":0.5846546896}, -{"learn":[236.0385457],"iteration":258,"passed_time":0.2043241206,"remaining_time":0.5845720979}, -{"learn":[235.1681646],"iteration":259,"passed_time":0.2050800245,"remaining_time":0.5836893005}, -{"learn":[233.9263754],"iteration":260,"passed_time":0.2058911784,"remaining_time":0.5829639113}, -{"learn":[233.4651113],"iteration":261,"passed_time":0.2066417155,"remaining_time":0.5820671223}, -{"learn":[232.8769228],"iteration":262,"passed_time":0.207408686,"remaining_time":0.5812174966}, -{"learn":[232.3156672],"iteration":263,"passed_time":0.2081722358,"remaining_time":0.5803589604}, -{"learn":[231.9137269],"iteration":264,"passed_time":0.2089940995,"remaining_time":0.5796628797}, -{"learn":[231.4086531],"iteration":265,"passed_time":0.2098098592,"remaining_time":0.5789490101}, -{"learn":[230.7801018],"iteration":266,"passed_time":0.2105362878,"remaining_time":0.5779891348}, -{"learn":[230.3125671],"iteration":267,"passed_time":0.2113451217,"remaining_time":0.5772560788}, -{"learn":[229.8171138],"iteration":268,"passed_time":0.2121952372,"remaining_time":0.5766346408}, 
-{"learn":[228.9896699],"iteration":269,"passed_time":0.2129847411,"remaining_time":0.5758476334}, -{"learn":[228.1907234],"iteration":270,"passed_time":0.2138539152,"remaining_time":0.5752749231}, -{"learn":[227.5037554],"iteration":271,"passed_time":0.214660443,"remaining_time":0.574532362}, -{"learn":[227.0812216],"iteration":272,"passed_time":0.2154085642,"remaining_time":0.5736337956}, -{"learn":[226.531358],"iteration":273,"passed_time":0.2161630274,"remaining_time":0.5727531309}, -{"learn":[225.9401478],"iteration":274,"passed_time":0.2173153224,"remaining_time":0.5729222136}, -{"learn":[225.5392312],"iteration":275,"passed_time":0.2183920375,"remaining_time":0.5728834608}, -{"learn":[224.4404722],"iteration":276,"passed_time":0.2195621124,"remaining_time":0.5730808927}, -{"learn":[222.9875036],"iteration":277,"passed_time":0.2206099298,"remaining_time":0.5729509689}, -{"learn":[222.0576336],"iteration":278,"passed_time":0.2220810074,"remaining_time":0.5739082664}, -{"learn":[221.4001301],"iteration":279,"passed_time":0.2228432026,"remaining_time":0.573025378}, -{"learn":[220.6016936],"iteration":280,"passed_time":0.2235654448,"remaining_time":0.5720411204}, -{"learn":[220.0410548],"iteration":281,"passed_time":0.2242912506,"remaining_time":0.5710677941}, -{"learn":[219.6337237],"iteration":282,"passed_time":0.2250029287,"remaining_time":0.5700604237}, -{"learn":[219.3044009],"iteration":283,"passed_time":0.2257394625,"remaining_time":0.5691177997}, -{"learn":[218.776607],"iteration":284,"passed_time":0.226414503,"remaining_time":0.5680223496}, -{"learn":[217.8416864],"iteration":285,"passed_time":0.2270884581,"remaining_time":0.5669271297}, -{"learn":[217.4529483],"iteration":286,"passed_time":0.227759198,"remaining_time":0.5658268578}, -{"learn":[217.0905909],"iteration":287,"passed_time":0.2284721528,"remaining_time":0.5648339332}, -{"learn":[216.0690431],"iteration":288,"passed_time":0.2292173387,"remaining_time":0.5639222416}, -{"learn":[215.7059666],"iteration":289,"passed_time":0.2299477881,"remaining_time":0.5629756191}, -{"learn":[215.2980626],"iteration":290,"passed_time":0.2306771118,"remaining_time":0.5620277397}, -{"learn":[214.6205749],"iteration":291,"passed_time":0.2313760475,"remaining_time":0.5610076768}, -{"learn":[214.3146893],"iteration":292,"passed_time":0.23417155,"remaining_time":0.5650487572}, -{"learn":[213.4974494],"iteration":293,"passed_time":0.2349282633,"remaining_time":0.5641474623}, -{"learn":[213.0350907],"iteration":294,"passed_time":0.2356636625,"remaining_time":0.5631962104}, -{"learn":[212.6353058],"iteration":295,"passed_time":0.2363248571,"remaining_time":0.5620699304}, -{"learn":[211.3935718],"iteration":296,"passed_time":0.2370575987,"remaining_time":0.5611161342}, -{"learn":[210.420639],"iteration":297,"passed_time":0.2378018762,"remaining_time":0.5601909969}, -{"learn":[209.6471807],"iteration":298,"passed_time":0.2385106368,"remaining_time":0.5591838006}, -{"learn":[209.321792],"iteration":299,"passed_time":0.2391724057,"remaining_time":0.5580689467}, -{"learn":[208.5385184],"iteration":300,"passed_time":0.2398697443,"remaining_time":0.5570397053}, -{"learn":[207.4529122],"iteration":301,"passed_time":0.2405938982,"remaining_time":0.5560746388}, -{"learn":[207.1634334],"iteration":302,"passed_time":0.2412924308,"remaining_time":0.5550522252}, -{"learn":[206.7215229],"iteration":303,"passed_time":0.2419842067,"remaining_time":0.5540164732}, -{"learn":[205.8254902],"iteration":304,"passed_time":0.2426827717,"remaining_time":0.552998447}, 
-{"learn":[204.9402971],"iteration":305,"passed_time":0.2435071858,"remaining_time":0.5522679312}, -{"learn":[204.2401925],"iteration":306,"passed_time":0.2442052594,"remaining_time":0.5512516116}, -{"learn":[203.830478],"iteration":307,"passed_time":0.2448946501,"remaining_time":0.5502178503}, -{"learn":[203.4878609],"iteration":308,"passed_time":0.2456328511,"remaining_time":0.5492954697}, -{"learn":[202.7358657],"iteration":309,"passed_time":0.2463957044,"remaining_time":0.5484291485}, -{"learn":[201.6776776],"iteration":310,"passed_time":0.2471161637,"remaining_time":0.5474695716}, -{"learn":[201.2502754],"iteration":311,"passed_time":0.2480671964,"remaining_time":0.5470199715}, -{"learn":[200.4058849],"iteration":312,"passed_time":0.248785112,"remaining_time":0.5460555014}, -{"learn":[200.0863728],"iteration":313,"passed_time":0.2495481778,"remaining_time":0.545191242}, -{"learn":[199.4424797],"iteration":314,"passed_time":0.2504540849,"remaining_time":0.5446382481}, -{"learn":[198.434697],"iteration":315,"passed_time":0.2512015403,"remaining_time":0.5437400429}, -{"learn":[197.6092453],"iteration":316,"passed_time":0.2519345938,"remaining_time":0.5428117589}, -{"learn":[197.4347459],"iteration":317,"passed_time":0.2522956799,"remaining_time":0.5410869612}, -{"learn":[197.1730434],"iteration":318,"passed_time":0.2530093233,"remaining_time":0.5401233517}, -{"learn":[196.3205483],"iteration":319,"passed_time":0.2537239044,"remaining_time":0.5391632969}, -{"learn":[196.1442192],"iteration":320,"passed_time":0.2544253026,"remaining_time":0.5381768861}, -{"learn":[195.1358489],"iteration":321,"passed_time":0.2551259045,"remaining_time":0.5371905691}, -{"learn":[194.7654394],"iteration":322,"passed_time":0.2557806094,"remaining_time":0.5361098223}, -{"learn":[194.5257029],"iteration":323,"passed_time":0.2564364386,"remaining_time":0.5350340509}, -{"learn":[194.3460045],"iteration":324,"passed_time":0.2571010115,"remaining_time":0.5339790239}, -{"learn":[193.5423137],"iteration":325,"passed_time":0.2577692722,"remaining_time":0.5329340168}, -{"learn":[193.361594],"iteration":326,"passed_time":0.2584108092,"remaining_time":0.5318363138}, -{"learn":[192.8609113],"iteration":327,"passed_time":0.2591080869,"remaining_time":0.5308555927}, -{"learn":[192.0152368],"iteration":328,"passed_time":0.259827105,"remaining_time":0.5299209346}, -{"learn":[191.2984363],"iteration":329,"passed_time":0.2605309884,"remaining_time":0.5289568553}, -{"learn":[190.7528647],"iteration":330,"passed_time":0.2612934215,"remaining_time":0.5281126857}, -{"learn":[190.0073779],"iteration":331,"passed_time":0.2620074777,"remaining_time":0.5271716719}, -{"learn":[189.1785006],"iteration":332,"passed_time":0.2626834993,"remaining_time":0.5261558379}, -{"learn":[188.5504856],"iteration":333,"passed_time":0.2649155206,"remaining_time":0.5282447208}, -{"learn":[188.0987857],"iteration":334,"passed_time":0.265882104,"remaining_time":0.5277958184}, -{"learn":[187.3100528],"iteration":335,"passed_time":0.266580835,"remaining_time":0.5268145072}, -{"learn":[187.0695173],"iteration":336,"passed_time":0.2673128519,"remaining_time":0.5259003584}, -{"learn":[186.3384284],"iteration":337,"passed_time":0.2681301671,"remaining_time":0.5251543509}, -{"learn":[185.8397918],"iteration":338,"passed_time":0.268867105,"remaining_time":0.5242511988}, -{"learn":[185.5281498],"iteration":339,"passed_time":0.2694914785,"remaining_time":0.5231305172}, -{"learn":[185.0772929],"iteration":340,"passed_time":0.2702359925,"remaining_time":0.522244924}, 
-{"learn":[184.3896485],"iteration":341,"passed_time":0.2709851814,"remaining_time":0.5213691502}, -{"learn":[183.9436706],"iteration":342,"passed_time":0.2716560669,"remaining_time":0.5203441282}, -{"learn":[182.943391],"iteration":343,"passed_time":0.2723687927,"remaining_time":0.5194009536}, -{"learn":[182.1191152],"iteration":344,"passed_time":0.2730501676,"remaining_time":0.5183995936}, -{"learn":[181.9430507],"iteration":345,"passed_time":0.2737274762,"remaining_time":0.5173923971}, -{"learn":[181.2225343],"iteration":346,"passed_time":0.2744020669,"remaining_time":0.5163819876}, -{"learn":[180.7098556],"iteration":347,"passed_time":0.2750624311,"remaining_time":0.5153468536}, -{"learn":[180.1134451],"iteration":348,"passed_time":0.2757285085,"remaining_time":0.5143245245}, -{"learn":[179.9030133],"iteration":349,"passed_time":0.2764445126,"remaining_time":0.5133969519}, -{"learn":[179.5222111],"iteration":350,"passed_time":0.2771355895,"remaining_time":0.5124244945}, -{"learn":[178.6909028],"iteration":351,"passed_time":0.2781819337,"remaining_time":0.5121076507}, -{"learn":[178.3826558],"iteration":352,"passed_time":0.2790138174,"remaining_time":0.5113935973}, -{"learn":[178.0647718],"iteration":353,"passed_time":0.2797921853,"remaining_time":0.5105812195}, -{"learn":[177.0587936],"iteration":354,"passed_time":0.2809552339,"remaining_time":0.5104679602}, -{"learn":[176.7722999],"iteration":355,"passed_time":0.2816649649,"remaining_time":0.5095287567}, -{"learn":[176.4841216],"iteration":356,"passed_time":0.2823865309,"remaining_time":0.5086121551}, -{"learn":[176.0194343],"iteration":357,"passed_time":0.2831539227,"remaining_time":0.5077788223}, -{"learn":[175.7636755],"iteration":358,"passed_time":0.2839027184,"remaining_time":0.5069126533}, -{"learn":[175.0494995],"iteration":359,"passed_time":0.2846329524,"remaining_time":0.5060141375}, -{"learn":[174.8436133],"iteration":360,"passed_time":0.285326012,"remaining_time":0.5050507526}, -{"learn":[174.5320971],"iteration":361,"passed_time":0.2860244795,"remaining_time":0.504098392}, -{"learn":[173.8910897],"iteration":362,"passed_time":0.2866973695,"remaining_time":0.5031025465}, -{"learn":[173.2930522],"iteration":363,"passed_time":0.2874082129,"remaining_time":0.5021747896}, -{"learn":[172.7834317],"iteration":364,"passed_time":0.2881249894,"remaining_time":0.5012585432}, -{"learn":[172.5451464],"iteration":365,"passed_time":0.2888148206,"remaining_time":0.5002967112}, -{"learn":[172.2138011],"iteration":366,"passed_time":0.2894541694,"remaining_time":0.4992492894}, -{"learn":[171.2132824],"iteration":367,"passed_time":0.2901284169,"remaining_time":0.4982640203}, -{"learn":[171.050042],"iteration":368,"passed_time":0.2908197341,"remaining_time":0.4973096267}, -{"learn":[170.643188],"iteration":369,"passed_time":0.2915489378,"remaining_time":0.4964211644}, -{"learn":[170.2444386],"iteration":370,"passed_time":0.2922851539,"remaining_time":0.4955454497}, -{"learn":[169.6615289],"iteration":371,"passed_time":0.292994688,"remaining_time":0.494625441}, -{"learn":[169.2398256],"iteration":372,"passed_time":0.2936843023,"remaining_time":0.4936730766}, -{"learn":[169.1089687],"iteration":373,"passed_time":0.2964977581,"remaining_time":0.4962769962}, -{"learn":[168.3913973],"iteration":374,"passed_time":0.2972895888,"remaining_time":0.4954826481}, -{"learn":[168.0109707],"iteration":375,"passed_time":0.2979909083,"remaining_time":0.4945381031}, 
-{"learn":[167.6623955],"iteration":376,"passed_time":0.2989716806,"remaining_time":0.4940566498}, -{"learn":[167.4718941],"iteration":377,"passed_time":0.2997289156,"remaining_time":0.4932047235}, -{"learn":[167.1617279],"iteration":378,"passed_time":0.3004271163,"remaining_time":0.4922565679}, -{"learn":[166.8382257],"iteration":379,"passed_time":0.3011389988,"remaining_time":0.4913320507}, -{"learn":[166.5470885],"iteration":380,"passed_time":0.3019178179,"remaining_time":0.4905173998}, -{"learn":[165.9805333],"iteration":381,"passed_time":0.302612932,"remaining_time":0.4895675182}, -{"learn":[165.8072213],"iteration":382,"passed_time":0.3033300382,"remaining_time":0.4886543957}, -{"learn":[165.2007912],"iteration":383,"passed_time":0.3040284921,"remaining_time":0.4877123727}, -{"learn":[164.7921382],"iteration":384,"passed_time":0.3046742614,"remaining_time":0.4866874564}, -{"learn":[164.5110541],"iteration":385,"passed_time":0.3053264024,"remaining_time":0.48567464}, -{"learn":[163.8643792],"iteration":386,"passed_time":0.3061135126,"remaining_time":0.484877476}, -{"learn":[163.164661],"iteration":387,"passed_time":0.3068363396,"remaining_time":0.4839789686}, -{"learn":[162.7088389],"iteration":388,"passed_time":0.3075190235,"remaining_time":0.483018312}, -{"learn":[162.0327601],"iteration":389,"passed_time":0.3082005155,"remaining_time":0.4820572166}, -{"learn":[161.7854959],"iteration":390,"passed_time":0.3094010582,"remaining_time":0.481905996}, -{"learn":[161.3755056],"iteration":391,"passed_time":0.3102488967,"remaining_time":0.4812023704}, -{"learn":[160.7323132],"iteration":392,"passed_time":0.3114061661,"remaining_time":0.480975936}, -{"learn":[160.6155798],"iteration":393,"passed_time":0.3120978922,"remaining_time":0.4800287377}, -{"learn":[160.3386438],"iteration":394,"passed_time":0.3128182977,"remaining_time":0.4791267597}, -{"learn":[159.8353012],"iteration":395,"passed_time":0.3135830746,"remaining_time":0.4782933764}, -{"learn":[159.1391019],"iteration":396,"passed_time":0.3142885811,"remaining_time":0.4773703134}, -{"learn":[158.4855614],"iteration":397,"passed_time":0.3151278956,"remaining_time":0.4766507366}, -{"learn":[158.0854374],"iteration":398,"passed_time":0.3158325824,"remaining_time":0.4757277744}, -{"learn":[157.8456813],"iteration":399,"passed_time":0.3165535651,"remaining_time":0.4748303477}, -{"learn":[157.4880514],"iteration":400,"passed_time":0.3172745179,"remaining_time":0.4739337562}, -{"learn":[156.9881906],"iteration":401,"passed_time":0.317999583,"remaining_time":0.4730441558}, -{"learn":[156.5677653],"iteration":402,"passed_time":0.31869001,"remaining_time":0.4721040595}, -{"learn":[156.3574827],"iteration":403,"passed_time":0.3194012099,"remaining_time":0.4711958443}, -{"learn":[156.2451016],"iteration":404,"passed_time":0.3200784444,"remaining_time":0.4702387022}, -{"learn":[155.9302814],"iteration":405,"passed_time":0.3208048281,"remaining_time":0.469354847}, -{"learn":[155.3032846],"iteration":406,"passed_time":0.3214914591,"remaining_time":0.4684138458}, -{"learn":[154.4769299],"iteration":407,"passed_time":0.3222114867,"remaining_time":0.4675225494}, -{"learn":[154.2897312],"iteration":408,"passed_time":0.3228884474,"remaining_time":0.4665698592}, -{"learn":[154.1934428],"iteration":409,"passed_time":0.3235838338,"remaining_time":0.4656450292}, -{"learn":[153.5724007],"iteration":410,"passed_time":0.3243219301,"remaining_time":0.4647825227}, -{"learn":[153.0736612],"iteration":411,"passed_time":0.3254188997,"remaining_time":0.4644327985}, 
-{"learn":[152.9564476],"iteration":412,"passed_time":0.3261923434,"remaining_time":0.4636196261}, -{"learn":[152.3993793],"iteration":413,"passed_time":0.3270152063,"remaining_time":0.4628765964}, -{"learn":[151.7975618],"iteration":414,"passed_time":0.3279057041,"remaining_time":0.4622285227}, -{"learn":[151.2709445],"iteration":415,"passed_time":0.3286975612,"remaining_time":0.4614408071}, -{"learn":[150.9104339],"iteration":416,"passed_time":0.3293475474,"remaining_time":0.4604547246}, -{"learn":[150.3215665],"iteration":417,"passed_time":0.3300545709,"remaining_time":0.4595496658}, -{"learn":[149.8387959],"iteration":418,"passed_time":0.33077865,"remaining_time":0.458669202}, -{"learn":[149.54926],"iteration":419,"passed_time":0.3314492353,"remaining_time":0.4577156107}, -{"learn":[149.1502195],"iteration":420,"passed_time":0.3321662222,"remaining_time":0.4568271797}, -{"learn":[148.9274928],"iteration":421,"passed_time":0.3328315807,"remaining_time":0.4558688475}, -{"learn":[148.6005806],"iteration":422,"passed_time":0.333523222,"remaining_time":0.454947752}, -{"learn":[148.2908845],"iteration":423,"passed_time":0.3342240753,"remaining_time":0.4540402533}, -{"learn":[147.9269959],"iteration":424,"passed_time":0.3349316394,"remaining_time":0.4531428063}, -{"learn":[147.4632561],"iteration":425,"passed_time":0.3356489336,"remaining_time":0.4522593613}, -{"learn":[146.8663669],"iteration":426,"passed_time":0.3363175341,"remaining_time":0.4513113513}, -{"learn":[146.5109632],"iteration":427,"passed_time":0.3370133065,"remaining_time":0.4504009611}, -{"learn":[146.1289295],"iteration":428,"passed_time":0.3376868233,"remaining_time":0.449461949}, -{"learn":[145.7040286],"iteration":429,"passed_time":0.3383919288,"remaining_time":0.4485660452}, -{"learn":[145.2840842],"iteration":430,"passed_time":0.3390635889,"remaining_time":0.4476268726}, -{"learn":[145.0408079],"iteration":431,"passed_time":0.3398307724,"remaining_time":0.446814534}, -{"learn":[144.7811929],"iteration":432,"passed_time":0.3406539426,"remaining_time":0.446075717}, -{"learn":[144.3322422],"iteration":433,"passed_time":0.3414858573,"remaining_time":0.4453479152}, -{"learn":[144.2214218],"iteration":434,"passed_time":0.3423766474,"remaining_time":0.4446961053}, -{"learn":[143.9688619],"iteration":435,"passed_time":0.343363285,"remaining_time":0.4441671852}, -{"learn":[143.6083057],"iteration":436,"passed_time":0.3441564846,"remaining_time":0.4433869585}, -{"learn":[143.2156197],"iteration":437,"passed_time":0.3448755302,"remaining_time":0.442511525}, -{"learn":[142.8357081],"iteration":438,"passed_time":0.3456010422,"remaining_time":0.4416450675}, -{"learn":[142.5387687],"iteration":439,"passed_time":0.3463049516,"remaining_time":0.4407517565}, -{"learn":[142.2042503],"iteration":440,"passed_time":0.3469375272,"remaining_time":0.4397688837}, -{"learn":[142.0426301],"iteration":441,"passed_time":0.3476209558,"remaining_time":0.4388517948}, -{"learn":[141.8952894],"iteration":442,"passed_time":0.3481070373,"remaining_time":0.4376876293}, -{"learn":[141.6464236],"iteration":443,"passed_time":0.3487595953,"remaining_time":0.4367349888}, -{"learn":[141.5411481],"iteration":444,"passed_time":0.3496728627,"remaining_time":0.4361088512}, -{"learn":[141.0823141],"iteration":445,"passed_time":0.3503799578,"remaining_time":0.4352253287}, -{"learn":[140.4287564],"iteration":446,"passed_time":0.3511061425,"remaining_time":0.434366212}, -{"learn":[140.1803918],"iteration":447,"passed_time":0.3517713368,"remaining_time":0.4334325399}, 
-{"learn":[139.9958853],"iteration":448,"passed_time":0.3524543155,"remaining_time":0.4325218883}, -{"learn":[139.7426692],"iteration":449,"passed_time":0.3531524215,"remaining_time":0.4316307374}, -{"learn":[139.1196939],"iteration":450,"passed_time":0.3538057256,"remaining_time":0.4306859054}, -{"learn":[138.6960149],"iteration":451,"passed_time":0.354538981,"remaining_time":0.4298392956}, -{"learn":[138.4218897],"iteration":452,"passed_time":0.3554323105,"remaining_time":0.4291864765}, -{"learn":[138.2403828],"iteration":453,"passed_time":0.3562520743,"remaining_time":0.4284441246}, -{"learn":[138.0915511],"iteration":454,"passed_time":0.3571315749,"remaining_time":0.4277729854}, -{"learn":[137.9493266],"iteration":455,"passed_time":0.3579010513,"remaining_time":0.4269696753}, -{"learn":[137.7770819],"iteration":456,"passed_time":0.3586828724,"remaining_time":0.426181181}, -{"learn":[137.3442044],"iteration":457,"passed_time":0.3595071309,"remaining_time":0.4254429365}, -{"learn":[137.1171304],"iteration":458,"passed_time":0.3602601247,"remaining_time":0.4246203213}, -{"learn":[136.6867566],"iteration":459,"passed_time":0.3610113779,"remaining_time":0.4237959654}, -{"learn":[136.518458],"iteration":460,"passed_time":0.3617839417,"remaining_time":0.4229968429}, -{"learn":[136.0988944],"iteration":461,"passed_time":0.3626347744,"remaining_time":0.4222889797}, -{"learn":[136.0065977],"iteration":462,"passed_time":0.3634189378,"remaining_time":0.4215031741}, -{"learn":[135.9112619],"iteration":463,"passed_time":0.3642669063,"remaining_time":0.4207910815}, -{"learn":[135.6242991],"iteration":464,"passed_time":0.3657024719,"remaining_time":0.4207544569}, -{"learn":[135.5102452],"iteration":465,"passed_time":0.3663477897,"remaining_time":0.4198062654}, -{"learn":[135.1636411],"iteration":466,"passed_time":0.367122744,"remaining_time":0.4190073288}, -{"learn":[135.0992177],"iteration":467,"passed_time":0.3680082784,"remaining_time":0.4183341968}, -{"learn":[134.8365283],"iteration":468,"passed_time":0.3687949318,"remaining_time":0.4175482063}, -{"learn":[134.7463366],"iteration":469,"passed_time":0.3695548426,"remaining_time":0.4167320565}, -{"learn":[134.587287],"iteration":470,"passed_time":0.3703077014,"remaining_time":0.4159082252}, -{"learn":[134.4480214],"iteration":471,"passed_time":0.3710675734,"remaining_time":0.4150925397}, -{"learn":[133.9584806],"iteration":472,"passed_time":0.3718711566,"remaining_time":0.4143257918}, -{"learn":[133.8098951],"iteration":473,"passed_time":0.3727293313,"remaining_time":0.4136194689}, -{"learn":[133.4703971],"iteration":474,"passed_time":0.373516589,"remaining_time":0.4128341247}, -{"learn":[133.0607757],"iteration":475,"passed_time":0.3747673253,"remaining_time":0.4125589884}, -{"learn":[132.8214312],"iteration":476,"passed_time":0.3755628445,"remaining_time":0.4117806451}, -{"learn":[132.6378881],"iteration":477,"passed_time":0.3762671474,"remaining_time":0.4109026171}, -{"learn":[132.3500025],"iteration":478,"passed_time":0.3769785902,"remaining_time":0.4100330804}, -{"learn":[132.029824],"iteration":479,"passed_time":0.3777045391,"remaining_time":0.4091799174}, -{"learn":[131.802763],"iteration":480,"passed_time":0.3784316215,"remaining_time":0.4083285064}, -{"learn":[131.4477917],"iteration":481,"passed_time":0.3791212487,"remaining_time":0.4074373586}, -{"learn":[131.340861],"iteration":482,"passed_time":0.3797984811,"remaining_time":0.4065337779}, -{"learn":[131.138128],"iteration":483,"passed_time":0.3804936332,"remaining_time":0.4056502371}, 
-{"learn":[130.8254085],"iteration":484,"passed_time":0.3811535885,"remaining_time":0.4047300992}, -{"learn":[130.6226731],"iteration":485,"passed_time":0.3818615219,"remaining_time":0.4038617742}, -{"learn":[130.5353426],"iteration":486,"passed_time":0.3824941239,"remaining_time":0.4029147548}, -{"learn":[130.2002559],"iteration":487,"passed_time":0.3831915747,"remaining_time":0.402037062}, -{"learn":[129.8650409],"iteration":488,"passed_time":0.3839342028,"remaining_time":0.4012073162}, -{"learn":[129.7614838],"iteration":489,"passed_time":0.384632996,"remaining_time":0.400332302}, -{"learn":[129.7035868],"iteration":490,"passed_time":0.3854408813,"remaining_time":0.3995710969}, -{"learn":[129.55131],"iteration":491,"passed_time":0.3862470198,"remaining_time":0.3988078985}, -{"learn":[128.9822814],"iteration":492,"passed_time":0.3877103564,"remaining_time":0.3987203868}, -{"learn":[128.7051594],"iteration":493,"passed_time":0.3885107746,"remaining_time":0.3979482833}, -{"learn":[128.6011681],"iteration":494,"passed_time":0.3890285653,"remaining_time":0.3968877282}, -{"learn":[128.4308561],"iteration":495,"passed_time":0.3898226152,"remaining_time":0.3961100768}, -{"learn":[128.307177],"iteration":496,"passed_time":0.3904763826,"remaining_time":0.3951903832}, -{"learn":[128.0653125],"iteration":497,"passed_time":0.3912745466,"remaining_time":0.394417314}, -{"learn":[127.6727011],"iteration":498,"passed_time":0.3920671814,"remaining_time":0.3936385929}, -{"learn":[127.4025463],"iteration":499,"passed_time":0.3927746746,"remaining_time":0.3927746746}, -{"learn":[127.2504625],"iteration":500,"passed_time":0.3935051998,"remaining_time":0.3919343208}, -{"learn":[126.7573321],"iteration":501,"passed_time":0.3942010118,"remaining_time":0.3910599679}, -{"learn":[126.5927506],"iteration":502,"passed_time":0.3948737544,"remaining_time":0.3901635307}, -{"learn":[126.3386114],"iteration":503,"passed_time":0.3955091027,"remaining_time":0.3892311804}, -{"learn":[125.9602791],"iteration":504,"passed_time":0.3962031505,"remaining_time":0.3883575436}, -{"learn":[125.7382376],"iteration":505,"passed_time":0.3969743797,"remaining_time":0.3875599675}, -{"learn":[125.5238531],"iteration":506,"passed_time":0.397672074,"remaining_time":0.3866909911}, -{"learn":[125.4034315],"iteration":507,"passed_time":0.3983463546,"remaining_time":0.3858000128}, -{"learn":[125.108111],"iteration":508,"passed_time":0.399085609,"remaining_time":0.3849725619}, -{"learn":[124.888637],"iteration":509,"passed_time":0.3998180539,"remaining_time":0.3841389146}, -{"learn":[124.5410307],"iteration":510,"passed_time":0.4005124711,"remaining_time":0.3832692727}, -{"learn":[124.4532414],"iteration":511,"passed_time":0.4012213082,"remaining_time":0.3824140594}, -{"learn":[124.2402559],"iteration":512,"passed_time":0.4023745223,"remaining_time":0.3819812717}, -{"learn":[124.0493354],"iteration":513,"passed_time":0.4032401044,"remaining_time":0.3812737174}, -{"learn":[123.8903048],"iteration":514,"passed_time":0.4039687924,"remaining_time":0.3804366298}, -{"learn":[123.2513784],"iteration":515,"passed_time":0.4051548784,"remaining_time":0.3800289945}, -{"learn":[122.9060997],"iteration":516,"passed_time":0.4059148746,"remaining_time":0.3792202794}, -{"learn":[122.2659462],"iteration":517,"passed_time":0.4067721264,"remaining_time":0.3785022488}, -{"learn":[121.9153759],"iteration":518,"passed_time":0.4074913036,"remaining_time":0.3776557169}, -{"learn":[121.768553],"iteration":519,"passed_time":0.4081988823,"remaining_time":0.3767989683}, 
-{"learn":[121.5586427],"iteration":520,"passed_time":0.409995707,"remaining_time":0.3769442297}, -{"learn":[121.328656],"iteration":521,"passed_time":0.4107966725,"remaining_time":0.3761701331}, -{"learn":[121.2414438],"iteration":522,"passed_time":0.4115722618,"remaining_time":0.3753727894}, -{"learn":[120.9509013],"iteration":523,"passed_time":0.4123457076,"remaining_time":0.3745735817}, -{"learn":[120.7287231],"iteration":524,"passed_time":0.4131926733,"remaining_time":0.3738409902}, -{"learn":[120.5182445],"iteration":525,"passed_time":0.4139855734,"remaining_time":0.373059243}, -{"learn":[120.1552011],"iteration":526,"passed_time":0.4147480129,"remaining_time":0.372250114}, -{"learn":[119.8518006],"iteration":527,"passed_time":0.4155235975,"remaining_time":0.3714529129}, -{"learn":[119.5758902],"iteration":528,"passed_time":0.4162645711,"remaining_time":0.3706249773}, -{"learn":[119.4684522],"iteration":529,"passed_time":0.4170442563,"remaining_time":0.369831699}, -{"learn":[119.1905877],"iteration":530,"passed_time":0.4200529662,"remaining_time":0.3710072338}, -{"learn":[119.120799],"iteration":531,"passed_time":0.4207544349,"remaining_time":0.37013736}, -{"learn":[118.8522754],"iteration":532,"passed_time":0.4216467363,"remaining_time":0.3694353206}, -{"learn":[118.5679242],"iteration":533,"passed_time":0.4225080099,"remaining_time":0.3687054918}, -{"learn":[118.2441926],"iteration":534,"passed_time":0.423325042,"remaining_time":0.3679367187}, -{"learn":[118.0571382],"iteration":535,"passed_time":0.424024176,"remaining_time":0.3670657046}, -{"learn":[117.670673],"iteration":536,"passed_time":0.4248534192,"remaining_time":0.3663075104}, -{"learn":[117.4626467],"iteration":537,"passed_time":0.4255302239,"remaining_time":0.3654181477}, -{"learn":[117.3972691],"iteration":538,"passed_time":0.4263780909,"remaining_time":0.3646758811}, -{"learn":[117.0498585],"iteration":539,"passed_time":0.4271672124,"remaining_time":0.3638831809}, -{"learn":[116.8864118],"iteration":540,"passed_time":0.4279159255,"remaining_time":0.3630562104}, -{"learn":[116.7032135],"iteration":541,"passed_time":0.4287596721,"remaining_time":0.3623098336}, -{"learn":[116.4747407],"iteration":542,"passed_time":0.4294964423,"remaining_time":0.3614730647}, -{"learn":[116.2745499],"iteration":543,"passed_time":0.4302697728,"remaining_time":0.3606673095}, -{"learn":[116.1634434],"iteration":544,"passed_time":0.4310846419,"remaining_time":0.3598963524}, -{"learn":[115.8837329],"iteration":545,"passed_time":0.4318891937,"remaining_time":0.3591166555}, -{"learn":[115.7847364],"iteration":546,"passed_time":0.4327173048,"remaining_time":0.3583563786}, -{"learn":[115.4726518],"iteration":547,"passed_time":0.4350198608,"remaining_time":0.358812002}, -{"learn":[115.2380594],"iteration":548,"passed_time":0.4359567659,"remaining_time":0.3581357038}, -{"learn":[114.9832854],"iteration":549,"passed_time":0.4366890203,"remaining_time":0.3572910166}, -{"learn":[114.7923225],"iteration":550,"passed_time":0.4374918564,"remaining_time":0.3565042532}, -{"learn":[114.7061856],"iteration":551,"passed_time":0.4382692012,"remaining_time":0.355696743}, -{"learn":[114.2484883],"iteration":552,"passed_time":0.4390679069,"remaining_time":0.3549066083}, -{"learn":[113.8249474],"iteration":553,"passed_time":0.4398571943,"remaining_time":0.3541088604}, -{"learn":[113.3587873],"iteration":554,"passed_time":0.4406256533,"remaining_time":0.3532944428}, -{"learn":[113.2361388],"iteration":555,"passed_time":0.4414523995,"remaining_time":0.3525267363}, 
-{"learn":[112.9708343],"iteration":556,"passed_time":0.4422158464,"remaining_time":0.3517084739}, -{"learn":[112.9087039],"iteration":557,"passed_time":0.4430209028,"remaining_time":0.3509233674}, -{"learn":[112.7109537],"iteration":558,"passed_time":0.4438200187,"remaining_time":0.3501335031}, -{"learn":[112.423592],"iteration":559,"passed_time":0.4446167561,"remaining_time":0.3493417369}, -{"learn":[112.1257099],"iteration":560,"passed_time":0.4454358233,"remaining_time":0.3485674268}, -{"learn":[112.030136],"iteration":561,"passed_time":0.4461519545,"remaining_time":0.3477127332}, -{"learn":[111.7072661],"iteration":562,"passed_time":0.4469219348,"remaining_time":0.3469003295}, -{"learn":[111.4030351],"iteration":563,"passed_time":0.4476818427,"remaining_time":0.3460802898}, -{"learn":[111.239361],"iteration":564,"passed_time":0.4484021036,"remaining_time":0.3452299382}, -{"learn":[110.9951902],"iteration":565,"passed_time":0.449314876,"remaining_time":0.3445276611}, -{"learn":[110.7555032],"iteration":566,"passed_time":0.4502279963,"remaining_time":0.3438249072}, -{"learn":[110.4762113],"iteration":567,"passed_time":0.4511769504,"remaining_time":0.3431486665}, -{"learn":[110.3324024],"iteration":568,"passed_time":0.451994751,"remaining_time":0.3423721225}, -{"learn":[110.1500777],"iteration":569,"passed_time":0.4531206866,"remaining_time":0.3418278864}, -{"learn":[109.8347114],"iteration":570,"passed_time":0.4539982843,"remaining_time":0.3410950332}, -{"learn":[109.6719111],"iteration":571,"passed_time":0.4548505203,"remaining_time":0.340342697}, -{"learn":[109.3507972],"iteration":572,"passed_time":0.4555675976,"remaining_time":0.3394892918}, -{"learn":[109.0393228],"iteration":573,"passed_time":0.4562556818,"remaining_time":0.338614844}, -{"learn":[108.9092308],"iteration":574,"passed_time":0.4569445801,"remaining_time":0.3377416462}, -{"learn":[108.5653394],"iteration":575,"passed_time":0.4576438887,"remaining_time":0.3368767514}, -{"learn":[108.4450533],"iteration":576,"passed_time":0.4583218118,"remaining_time":0.3359967529}, -{"learn":[108.2495707],"iteration":577,"passed_time":0.4589888198,"remaining_time":0.3351094843}, -{"learn":[108.1288216],"iteration":578,"passed_time":0.4596515406,"remaining_time":0.3342198594}, -{"learn":[107.9123132],"iteration":579,"passed_time":0.4603422806,"remaining_time":0.3333513066}, -{"learn":[107.6536187],"iteration":580,"passed_time":0.4610228753,"remaining_time":0.3324760495}, -{"learn":[107.4904367],"iteration":581,"passed_time":0.4617471769,"remaining_time":0.3316328521}, -{"learn":[107.1957928],"iteration":582,"passed_time":0.4624543923,"remaining_time":0.3307778415}, -{"learn":[106.9065549],"iteration":583,"passed_time":0.4631395366,"remaining_time":0.3299076151}, -{"learn":[106.7433792],"iteration":584,"passed_time":0.4642239834,"remaining_time":0.3293212874}, -{"learn":[106.6253206],"iteration":585,"passed_time":0.4652352206,"remaining_time":0.3286815381}, -{"learn":[106.4358702],"iteration":586,"passed_time":0.4660687078,"remaining_time":0.3279154622}, -{"learn":[106.1281316],"iteration":587,"passed_time":0.4672059101,"remaining_time":0.3273619642}, -{"learn":[105.8891673],"iteration":588,"passed_time":0.4679098908,"remaining_time":0.3265041852}, -{"learn":[105.4787768],"iteration":589,"passed_time":0.4686930301,"remaining_time":0.3257019362}, -{"learn":[105.2468209],"iteration":590,"passed_time":0.4693668321,"remaining_time":0.3248240852}, 
-{"learn":[105.0982909],"iteration":591,"passed_time":0.4701346348,"remaining_time":0.3240117078}, -{"learn":[104.8447079],"iteration":592,"passed_time":0.4707926298,"remaining_time":0.3231241152}, -{"learn":[104.5779736],"iteration":593,"passed_time":0.4715109685,"remaining_time":0.3222785407}, -{"learn":[104.4110502],"iteration":594,"passed_time":0.4721990484,"remaining_time":0.3214127977}, -{"learn":[104.0919646],"iteration":595,"passed_time":0.4729209495,"remaining_time":0.3205705765}, -{"learn":[103.8919657],"iteration":596,"passed_time":0.4736261245,"remaining_time":0.3197174676}, -{"learn":[103.7196598],"iteration":597,"passed_time":0.4743214736,"remaining_time":0.3188582481}, -{"learn":[103.4492347],"iteration":598,"passed_time":0.4750540599,"remaining_time":0.3180245042}, -{"learn":[103.2740587],"iteration":599,"passed_time":0.4757380094,"remaining_time":0.3171586729}, -{"learn":[103.1276456],"iteration":600,"passed_time":0.4764577468,"remaining_time":0.3163172063}, -{"learn":[102.9634171],"iteration":601,"passed_time":0.477147973,"remaining_time":0.3154566333}, -{"learn":[102.8259149],"iteration":602,"passed_time":0.477841893,"remaining_time":0.3145990573}, -{"learn":[102.7001608],"iteration":603,"passed_time":0.4785327652,"remaining_time":0.3137400248}, -{"learn":[102.3681297],"iteration":604,"passed_time":0.4792538273,"remaining_time":0.3129012592}, -{"learn":[102.1013055],"iteration":605,"passed_time":0.4799419689,"remaining_time":0.3120414781}, -{"learn":[102.0398696],"iteration":606,"passed_time":0.4811891844,"remaining_time":0.3115442331}, -{"learn":[101.9173139],"iteration":607,"passed_time":0.4821013094,"remaining_time":0.3108284758}, -{"learn":[101.679954],"iteration":608,"passed_time":0.4828874482,"remaining_time":0.3100311859}, -{"learn":[101.4066016],"iteration":609,"passed_time":0.4837180353,"remaining_time":0.3092623504}, -{"learn":[101.3379424],"iteration":610,"passed_time":0.4844565593,"remaining_time":0.3084346998}, -{"learn":[100.9210462],"iteration":611,"passed_time":0.4851632441,"remaining_time":0.3075871548}, -{"learn":[100.6453409],"iteration":612,"passed_time":0.4859509872,"remaining_time":0.3067912431}, -{"learn":[100.4676302],"iteration":613,"passed_time":0.4866537496,"remaining_time":0.3059419338}, -{"learn":[100.3387898],"iteration":614,"passed_time":0.4873585318,"remaining_time":0.3050943654}, -{"learn":[100.2590426],"iteration":615,"passed_time":0.4880636202,"remaining_time":0.3042474516}, -{"learn":[100.0247543],"iteration":616,"passed_time":0.4887351368,"remaining_time":0.3033801578}, -{"learn":[99.99480267],"iteration":617,"passed_time":0.4894739965,"remaining_time":0.3025551241}, -{"learn":[99.81066888],"iteration":618,"passed_time":0.4901212664,"remaining_time":0.3016739944}, -{"learn":[99.68847078],"iteration":619,"passed_time":0.4908183889,"remaining_time":0.3008241738}, -{"learn":[99.31388979],"iteration":620,"passed_time":0.4915709632,"remaining_time":0.3000086877}, -{"learn":[99.14994819],"iteration":621,"passed_time":0.4921902068,"remaining_time":0.2991123765}, -{"learn":[98.97268473],"iteration":622,"passed_time":0.4928377712,"remaining_time":0.2982340927}, -{"learn":[98.8268423],"iteration":623,"passed_time":0.4935341138,"remaining_time":0.2973859403}, -{"learn":[98.75382222],"iteration":624,"passed_time":0.4942047725,"remaining_time":0.2965228635}, -{"learn":[98.5446163],"iteration":625,"passed_time":0.4948727586,"remaining_time":0.2956588047}, 
-{"learn":[98.31367241],"iteration":626,"passed_time":0.4960391022,"remaining_time":0.2950918423}, -{"learn":[97.98978586],"iteration":627,"passed_time":0.4969278145,"remaining_time":0.2943585143}, -{"learn":[97.88782136],"iteration":628,"passed_time":0.4977580908,"remaining_time":0.2935902252}, -{"learn":[97.66350146],"iteration":629,"passed_time":0.4986682518,"remaining_time":0.2928686558}, -{"learn":[97.55228588],"iteration":630,"passed_time":0.4994007915,"remaining_time":0.2920426182}, -{"learn":[97.22287279],"iteration":631,"passed_time":0.5002286474,"remaining_time":0.2912723769}, -{"learn":[96.94093467],"iteration":632,"passed_time":0.5008796211,"remaining_time":0.2903994012}, -{"learn":[96.90730378],"iteration":633,"passed_time":0.5016105615,"remaining_time":0.2895732894}, -{"learn":[96.72645055],"iteration":634,"passed_time":0.5022623528,"remaining_time":0.2887019823}, -{"learn":[96.51649102],"iteration":635,"passed_time":0.5029871982,"remaining_time":0.2878731763}, -{"learn":[96.48646053],"iteration":636,"passed_time":0.5036426214,"remaining_time":0.2870051359}, -{"learn":[96.28440839],"iteration":637,"passed_time":0.5043689065,"remaining_time":0.2861779689}, -{"learn":[96.12574304],"iteration":638,"passed_time":0.5051168618,"remaining_time":0.2853633601}, -{"learn":[96.10080732],"iteration":639,"passed_time":0.5057668246,"remaining_time":0.2844938388}, -{"learn":[95.89441847],"iteration":640,"passed_time":0.5064589651,"remaining_time":0.2836486248}, -{"learn":[95.66836282],"iteration":641,"passed_time":0.5071504601,"remaining_time":0.2828035276}, -{"learn":[95.45630098],"iteration":642,"passed_time":0.5078985921,"remaining_time":0.2819903536}, -{"learn":[95.38867496],"iteration":643,"passed_time":0.5085938678,"remaining_time":0.2811481629}, -{"learn":[95.36114208],"iteration":644,"passed_time":0.5092591682,"remaining_time":0.2802899298}, -{"learn":[95.32338394],"iteration":645,"passed_time":0.5099181108,"remaining_time":0.2794288099}, -{"learn":[95.13865864],"iteration":646,"passed_time":0.5106966827,"remaining_time":0.2786335842}, -{"learn":[95.04087212],"iteration":647,"passed_time":0.5113817515,"remaining_time":0.2777876181}, -{"learn":[94.80557884],"iteration":648,"passed_time":0.5121271504,"remaining_time":0.2769747762}, -{"learn":[94.73138699],"iteration":649,"passed_time":0.5129867905,"remaining_time":0.2762236564}, -{"learn":[94.58108436],"iteration":650,"passed_time":0.5138682554,"remaining_time":0.2754839034}, -{"learn":[94.36301263],"iteration":651,"passed_time":0.5146567687,"remaining_time":0.2746941035}, -{"learn":[94.31236028],"iteration":652,"passed_time":0.5153630006,"remaining_time":0.2738605838}, -{"learn":[94.27889282],"iteration":653,"passed_time":0.5159975291,"remaining_time":0.2729895185}, -{"learn":[94.10901123],"iteration":654,"passed_time":0.5167286981,"remaining_time":0.2721700776}, -{"learn":[93.96474618],"iteration":655,"passed_time":0.5174240002,"remaining_time":0.2713320977}, -{"learn":[93.7421906],"iteration":656,"passed_time":0.5181698898,"remaining_time":0.2705209622}, -{"learn":[93.60514066],"iteration":657,"passed_time":0.5188670115,"remaining_time":0.2696846777}, -{"learn":[93.46612437],"iteration":658,"passed_time":0.5196392555,"remaining_time":0.2688876876}, -{"learn":[93.33229342],"iteration":659,"passed_time":0.5203491179,"remaining_time":0.2680586365}, -{"learn":[93.21079587],"iteration":660,"passed_time":0.5211038193,"remaining_time":0.2672529421}, 
-{"learn":[93.10705876],"iteration":661,"passed_time":0.5218962945,"remaining_time":0.2664666881}, -{"learn":[92.81810673],"iteration":662,"passed_time":0.5226022497,"remaining_time":0.2656364377}, -{"learn":[92.62650619],"iteration":663,"passed_time":0.5233367263,"remaining_time":0.264820994}, -{"learn":[92.42888654],"iteration":664,"passed_time":0.5240591444,"remaining_time":0.2639997194}, -{"learn":[92.09176732],"iteration":665,"passed_time":0.5247766142,"remaining_time":0.26317626}, -{"learn":[92.04854396],"iteration":666,"passed_time":0.5255073908,"remaining_time":0.2623597618}, -{"learn":[91.88550859],"iteration":667,"passed_time":0.5263340708,"remaining_time":0.2615911849}, -{"learn":[91.85350035],"iteration":668,"passed_time":0.5271081748,"remaining_time":0.2607964213}, -{"learn":[91.77807671],"iteration":669,"passed_time":0.528021832,"remaining_time":0.2600704546}, -{"learn":[91.65435283],"iteration":670,"passed_time":0.528839096,"remaining_time":0.2592966655}, -{"learn":[91.56783533],"iteration":671,"passed_time":0.5297163699,"remaining_time":0.2585520377}, -{"learn":[91.26580867],"iteration":672,"passed_time":0.5304303232,"remaining_time":0.2577276607}, -{"learn":[91.16238764],"iteration":673,"passed_time":0.5311953529,"remaining_time":0.2569283161}, -{"learn":[91.10065046],"iteration":674,"passed_time":0.5318643397,"remaining_time":0.2560828302}, -{"learn":[90.85117159],"iteration":675,"passed_time":0.5325459664,"remaining_time":0.2552439247}, -{"learn":[90.69540399],"iteration":676,"passed_time":0.5332966017,"remaining_time":0.2544384082}, -{"learn":[90.57391813],"iteration":677,"passed_time":0.534000504,"remaining_time":0.2536108588}, -{"learn":[90.51437297],"iteration":678,"passed_time":0.5347579481,"remaining_time":0.2528089858}, -{"learn":[90.33889177],"iteration":679,"passed_time":0.5354538608,"remaining_time":0.2519782875}, -{"learn":[90.24276011],"iteration":680,"passed_time":0.5361224954,"remaining_time":0.2511352071}, -{"learn":[90.10789257],"iteration":681,"passed_time":0.5368487517,"remaining_time":0.2503195059}, -{"learn":[89.95880505],"iteration":682,"passed_time":0.5375062891,"remaining_time":0.2494721722}, -{"learn":[89.87212108],"iteration":683,"passed_time":0.5382400652,"remaining_time":0.2486606149}, -{"learn":[89.7088169],"iteration":684,"passed_time":0.5389202001,"remaining_time":0.2478246176}, -{"learn":[89.65178757],"iteration":685,"passed_time":0.5395594986,"remaining_time":0.2469703828}, -{"learn":[89.45790266],"iteration":686,"passed_time":0.5402365223,"remaining_time":0.2461339614}, -{"learn":[89.28717191],"iteration":687,"passed_time":0.5408789428,"remaining_time":0.2452823113}, -{"learn":[89.01579809],"iteration":688,"passed_time":0.5415824167,"remaining_time":0.2444588267}, -{"learn":[88.8669891],"iteration":689,"passed_time":0.5425465212,"remaining_time":0.2437527849}, -{"learn":[88.6699944],"iteration":690,"passed_time":0.5434097686,"remaining_time":0.2430008951}, -{"learn":[88.56760592],"iteration":691,"passed_time":0.5442042224,"remaining_time":0.2422180643}, -{"learn":[88.44988882],"iteration":692,"passed_time":0.5449873307,"remaining_time":0.2414301739}, -{"learn":[88.27053152],"iteration":693,"passed_time":0.5457085581,"remaining_time":0.2406150127}, -{"learn":[88.2414795],"iteration":694,"passed_time":0.5464008981,"remaining_time":0.2397874445}, -{"learn":[88.15186789],"iteration":695,"passed_time":0.5471067144,"remaining_time":0.2389661511}, -{"learn":[88.05019566],"iteration":696,"passed_time":0.5477748726,"remaining_time":0.2381288184}, 
-{"learn":[87.99223331],"iteration":697,"passed_time":0.5484767048,"remaining_time":0.2373065399}, -{"learn":[87.73657542],"iteration":698,"passed_time":0.5492154977,"remaining_time":0.2365005219}, -{"learn":[87.66230383],"iteration":699,"passed_time":0.549911164,"remaining_time":0.2356762131}, -{"learn":[87.55509554],"iteration":700,"passed_time":0.5506003596,"remaining_time":0.2348495114}, -{"learn":[87.3960713],"iteration":701,"passed_time":0.5513270902,"remaining_time":0.2340391352}, -{"learn":[87.35367605],"iteration":702,"passed_time":0.5520176629,"remaining_time":0.233213721}, -{"learn":[87.31612871],"iteration":703,"passed_time":0.5527335476,"remaining_time":0.2323993325}, -{"learn":[87.28308304],"iteration":704,"passed_time":0.5534368292,"remaining_time":0.2315799498}, -{"learn":[87.10333984],"iteration":705,"passed_time":0.5541477179,"remaining_time":0.2307640638}, -{"learn":[86.95975665],"iteration":706,"passed_time":0.5548600751,"remaining_time":0.2299490834}, -{"learn":[86.74618961],"iteration":707,"passed_time":0.5555675455,"remaining_time":0.2291323775}, -{"learn":[86.59571901],"iteration":708,"passed_time":0.5562639173,"remaining_time":0.2283114244}, -{"learn":[86.5503641],"iteration":709,"passed_time":0.5570510524,"remaining_time":0.2275278947}, -{"learn":[86.37323419],"iteration":710,"passed_time":0.5586928433,"remaining_time":0.2270917465}, -{"learn":[86.33957217],"iteration":711,"passed_time":0.5595801722,"remaining_time":0.2263470359}, -{"learn":[86.14207856],"iteration":712,"passed_time":0.5606072798,"remaining_time":0.2256581898}, -{"learn":[85.97356517],"iteration":713,"passed_time":0.5622974661,"remaining_time":0.225233999}, -{"learn":[85.82683155],"iteration":714,"passed_time":0.5630898267,"remaining_time":0.2244483924}, -{"learn":[85.66753612],"iteration":715,"passed_time":0.563774477,"remaining_time":0.2236200439}, -{"learn":[85.59529512],"iteration":716,"passed_time":0.5645601233,"remaining_time":0.2228319594}, -{"learn":[85.54704191],"iteration":717,"passed_time":0.5653043118,"remaining_time":0.2220275988}, -{"learn":[85.44003169],"iteration":718,"passed_time":0.5659984098,"remaining_time":0.2212038292}, -{"learn":[85.35282701],"iteration":719,"passed_time":0.5666923085,"remaining_time":0.2203803422}, -{"learn":[85.11502108],"iteration":720,"passed_time":0.5673529244,"remaining_time":0.2195443355}, -{"learn":[84.99295044],"iteration":721,"passed_time":0.5680756424,"remaining_time":0.2187327266}, -{"learn":[84.84849866],"iteration":722,"passed_time":0.5689289809,"remaining_time":0.2179714076}, -{"learn":[84.719048],"iteration":723,"passed_time":0.5696097063,"remaining_time":0.2171440317}, -{"learn":[84.61907861],"iteration":724,"passed_time":0.5703740463,"remaining_time":0.2163487762}, -{"learn":[84.50627558],"iteration":725,"passed_time":0.5710536894,"remaining_time":0.2155216404}, -{"learn":[84.40820784],"iteration":726,"passed_time":0.5717404397,"remaining_time":0.2146975791}, -{"learn":[84.26996548],"iteration":727,"passed_time":0.5724877312,"remaining_time":0.213896515}, -{"learn":[84.13394306],"iteration":728,"passed_time":0.5736926945,"remaining_time":0.2132657342}, -{"learn":[84.07218013],"iteration":729,"passed_time":0.5744745587,"remaining_time":0.2124768916}, -{"learn":[83.96978134],"iteration":730,"passed_time":0.5752835297,"remaining_time":0.2116980431}, -{"learn":[83.87239832],"iteration":731,"passed_time":0.5764675086,"remaining_time":0.2110564102}, -{"learn":[83.68730916],"iteration":732,"passed_time":0.5772072764,"remaining_time":0.2102514909}, 
-{"learn":[83.59803147],"iteration":733,"passed_time":0.5779085189,"remaining_time":0.2094327875}, -{"learn":[83.50674817],"iteration":734,"passed_time":0.5785820087,"remaining_time":0.2086043977}, -{"learn":[83.40334104],"iteration":735,"passed_time":0.5792795435,"remaining_time":0.2077850536}, -{"learn":[83.34292332],"iteration":736,"passed_time":0.5799848833,"remaining_time":0.2069688254}, -{"learn":[83.2841552],"iteration":737,"passed_time":0.5807341513,"remaining_time":0.2061684927}, -{"learn":[83.1251919],"iteration":738,"passed_time":0.5814365262,"remaining_time":0.2053517366}, -{"learn":[82.95627663],"iteration":739,"passed_time":0.5821384379,"remaining_time":0.2045351268}, -{"learn":[82.86885815],"iteration":740,"passed_time":0.5828135561,"remaining_time":0.2037094616}, -{"learn":[82.71013967],"iteration":741,"passed_time":0.5835325297,"remaining_time":0.202899451}, -{"learn":[82.65586453],"iteration":742,"passed_time":0.5842183786,"remaining_time":0.2020782279}, -{"learn":[82.49744791],"iteration":743,"passed_time":0.5849488148,"remaining_time":0.2012727105}, -{"learn":[82.41674243],"iteration":744,"passed_time":0.5856415308,"remaining_time":0.2004544837}, -{"learn":[82.29664345],"iteration":745,"passed_time":0.5863524964,"remaining_time":0.1996428071}, -{"learn":[82.15066517],"iteration":746,"passed_time":0.587052426,"remaining_time":0.1988276624}, -{"learn":[82.0370734],"iteration":747,"passed_time":0.5877491631,"remaining_time":0.1980117502}, -{"learn":[81.86493678],"iteration":748,"passed_time":0.5906236844,"remaining_time":0.197925961}, -{"learn":[81.6952753],"iteration":749,"passed_time":0.5915055033,"remaining_time":0.1971685011}, -{"learn":[81.52076307],"iteration":750,"passed_time":0.5922371784,"remaining_time":0.1963609286}, -{"learn":[81.47245048],"iteration":751,"passed_time":0.5929570817,"remaining_time":0.1955496759}, -{"learn":[81.33248962],"iteration":752,"passed_time":0.5936410334,"remaining_time":0.1947268728}, -{"learn":[81.17612462],"iteration":753,"passed_time":0.5943301182,"remaining_time":0.1939061128}, -{"learn":[81.03819438],"iteration":754,"passed_time":0.5950057994,"remaining_time":0.1930813521}, -{"learn":[80.96675016],"iteration":755,"passed_time":0.5956989581,"remaining_time":0.1922626267}, -{"learn":[80.82905315],"iteration":756,"passed_time":0.596402797,"remaining_time":0.1914476614}, -{"learn":[80.70283152],"iteration":757,"passed_time":0.5971121262,"remaining_time":0.1906347421}, -{"learn":[80.59212139],"iteration":758,"passed_time":0.5978766948,"remaining_time":0.1898396356}, -{"learn":[80.47676188],"iteration":759,"passed_time":0.5985269763,"remaining_time":0.1890085188}, -{"learn":[80.39467719],"iteration":760,"passed_time":0.5992300805,"remaining_time":0.1881944668}, -{"learn":[80.29316384],"iteration":761,"passed_time":0.5999728603,"remaining_time":0.1873930981}, -{"learn":[80.25419775],"iteration":762,"passed_time":0.6006487726,"remaining_time":0.1865711128}, -{"learn":[80.18231027],"iteration":763,"passed_time":0.601315945,"remaining_time":0.1857468102}, -{"learn":[80.03530867],"iteration":764,"passed_time":0.6020048575,"remaining_time":0.1849295968}, -{"learn":[79.882003],"iteration":765,"passed_time":0.602681393,"remaining_time":0.1841089373}, -{"learn":[79.77233594],"iteration":766,"passed_time":0.6033975014,"remaining_time":0.1833006751}, -{"learn":[79.71948209],"iteration":767,"passed_time":0.6043010055,"remaining_time":0.1825492621}, -{"learn":[79.68256007],"iteration":768,"passed_time":0.6051361803,"remaining_time":0.181776928}, 
-{"learn":[79.5728087],"iteration":769,"passed_time":0.6059024247,"remaining_time":0.1809838411}, -{"learn":[79.48980365],"iteration":770,"passed_time":0.606774762,"remaining_time":0.1802223353}, -{"learn":[79.45344299],"iteration":771,"passed_time":0.6074893324,"remaining_time":0.1794139479}, -{"learn":[79.42383769],"iteration":772,"passed_time":0.6081550234,"remaining_time":0.1785914493}, -{"learn":[79.35304737],"iteration":773,"passed_time":0.6088516295,"remaining_time":0.1777783828}, -{"learn":[79.31777929],"iteration":774,"passed_time":0.6095126888,"remaining_time":0.1769552967}, -{"learn":[79.27584682],"iteration":775,"passed_time":0.6101470987,"remaining_time":0.1761249357}, -{"learn":[79.22300776],"iteration":776,"passed_time":0.6108225602,"remaining_time":0.1753068609}, -{"learn":[79.16336216],"iteration":777,"passed_time":0.6114805473,"remaining_time":0.1744841665}, -{"learn":[79.11542806],"iteration":778,"passed_time":0.6121921703,"remaining_time":0.1736771112}, -{"learn":[79.05297899],"iteration":779,"passed_time":0.6128772226,"remaining_time":0.1728628064}, -{"learn":[79.0055555],"iteration":780,"passed_time":0.6136133411,"remaining_time":0.172063152}, -{"learn":[78.96247249],"iteration":781,"passed_time":0.6143069065,"remaining_time":0.1712517975}, -{"learn":[78.93486725],"iteration":782,"passed_time":0.6150214491,"remaining_time":0.1704465574}, -{"learn":[78.86074492],"iteration":783,"passed_time":0.6157406477,"remaining_time":0.1696428315}, -{"learn":[78.7980841],"iteration":784,"passed_time":0.6164512964,"remaining_time":0.1688369793}, -{"learn":[78.68675842],"iteration":785,"passed_time":0.6171261859,"remaining_time":0.1680216333}, -{"learn":[78.56844849],"iteration":786,"passed_time":0.6178367384,"remaining_time":0.1672162964}, -{"learn":[78.45522467],"iteration":787,"passed_time":0.6189996585,"remaining_time":0.166532903}, -{"learn":[78.42915368],"iteration":788,"passed_time":0.6198542451,"remaining_time":0.1657658374}, -{"learn":[78.33271787],"iteration":789,"passed_time":0.6206450239,"remaining_time":0.1649815886}, -{"learn":[78.28517094],"iteration":790,"passed_time":0.6214469391,"remaining_time":0.1642002658}, -{"learn":[78.22550739],"iteration":791,"passed_time":0.6222275815,"remaining_time":0.1634133042}, -{"learn":[78.08128623],"iteration":792,"passed_time":0.6229308602,"remaining_time":0.162606164}, -{"learn":[78.00227098],"iteration":793,"passed_time":0.6236103449,"remaining_time":0.1617931122}, -{"learn":[77.84685715],"iteration":794,"passed_time":0.6243148914,"remaining_time":0.1609868588}, -{"learn":[77.7664605],"iteration":795,"passed_time":0.6251173567,"remaining_time":0.1602059557}, -{"learn":[77.67018672],"iteration":796,"passed_time":0.6258230666,"remaining_time":0.1594003545}, -{"learn":[77.58910134],"iteration":797,"passed_time":0.6265105294,"remaining_time":0.1585903846}, -{"learn":[77.44372873],"iteration":798,"passed_time":0.6272122011,"remaining_time":0.1577842959}, -{"learn":[77.37497353],"iteration":799,"passed_time":0.627911349,"remaining_time":0.1569778373}, -{"learn":[77.34882375],"iteration":800,"passed_time":0.6285931477,"remaining_time":0.1561673363}, -{"learn":[77.19068707],"iteration":801,"passed_time":0.6292539758,"remaining_time":0.1553519791}, -{"learn":[77.08922719],"iteration":802,"passed_time":0.6299858325,"remaining_time":0.1545544321}, -{"learn":[76.9729166],"iteration":803,"passed_time":0.6306619541,"remaining_time":0.1537434615}, -{"learn":[76.90348413],"iteration":804,"passed_time":0.6313562737,"remaining_time":0.152937234}, 
-{"learn":[76.82666583],"iteration":805,"passed_time":0.6320448005,"remaining_time":0.15212989}, -{"learn":[76.68116626],"iteration":806,"passed_time":0.6327180983,"remaining_time":0.1513191982}, -{"learn":[76.63374313],"iteration":807,"passed_time":0.6334837843,"remaining_time":0.1505308002}, -{"learn":[76.54946199],"iteration":808,"passed_time":0.6353693259,"remaining_time":0.1500068495}, -{"learn":[76.49248776],"iteration":809,"passed_time":0.636153259,"remaining_time":0.1492211348}, -{"learn":[76.30567098],"iteration":810,"passed_time":0.6369338775,"remaining_time":0.1484346521}, -{"learn":[76.20513103],"iteration":811,"passed_time":0.6380841085,"remaining_time":0.1477337591}, -{"learn":[76.14947506],"iteration":812,"passed_time":0.6388279669,"remaining_time":0.1469382901}, -{"learn":[76.07795348],"iteration":813,"passed_time":0.6395071892,"remaining_time":0.1461281784}, -{"learn":[76.03071793],"iteration":814,"passed_time":0.6401823992,"remaining_time":0.1453174771}, -{"learn":[75.85114002],"iteration":815,"passed_time":0.6409228105,"remaining_time":0.1445218102}, -{"learn":[75.72457319],"iteration":816,"passed_time":0.6446017846,"remaining_time":0.1443844879}, -{"learn":[75.62457528],"iteration":817,"passed_time":0.6453220021,"remaining_time":0.1435802009}, -{"learn":[75.57980812],"iteration":818,"passed_time":0.6460609694,"remaining_time":0.1427802631}, -{"learn":[75.50199726],"iteration":819,"passed_time":0.646843049,"remaining_time":0.1419899376}, -{"learn":[75.42896303],"iteration":820,"passed_time":0.6475823158,"remaining_time":0.1411902978}, -{"learn":[75.28107282],"iteration":821,"passed_time":0.6484158123,"remaining_time":0.14041121}, -{"learn":[75.24370116],"iteration":822,"passed_time":0.6491719459,"remaining_time":0.1396153517}, -{"learn":[75.21163781],"iteration":823,"passed_time":0.649970331,"remaining_time":0.1388286144}, -{"learn":[75.11238047],"iteration":824,"passed_time":0.6510685248,"remaining_time":0.1381054447}, -{"learn":[75.07821544],"iteration":825,"passed_time":0.6520026608,"remaining_time":0.1373468075}, -{"learn":[74.99763688],"iteration":826,"passed_time":0.6529194105,"remaining_time":0.1365841088}, -{"learn":[74.97260021],"iteration":827,"passed_time":0.6537992908,"remaining_time":0.1358133792}, -{"learn":[74.82280395],"iteration":828,"passed_time":0.6552552012,"remaining_time":0.1351612056}, -{"learn":[74.7142409],"iteration":829,"passed_time":0.656189855,"remaining_time":0.1344003317}, -{"learn":[74.68809134],"iteration":830,"passed_time":0.6571676623,"remaining_time":0.1336478158}, -{"learn":[74.66735038],"iteration":831,"passed_time":0.6579127713,"remaining_time":0.1328477711}, -{"learn":[74.62791607],"iteration":832,"passed_time":0.6587264532,"remaining_time":0.1320616059}, -{"learn":[74.4614704],"iteration":833,"passed_time":0.659462972,"remaining_time":0.131260016}, -{"learn":[74.42822033],"iteration":834,"passed_time":0.6602649954,"remaining_time":0.130471526}, -{"learn":[74.3786069],"iteration":835,"passed_time":0.6610943341,"remaining_time":0.1296883622}, -{"learn":[74.24628149],"iteration":836,"passed_time":0.6618613519,"remaining_time":0.1288929515}, -{"learn":[74.17289785],"iteration":837,"passed_time":0.6626922026,"remaining_time":0.1281099485}, -{"learn":[74.04392916],"iteration":838,"passed_time":0.6634163162,"remaining_time":0.1273063491}, -{"learn":[73.8944423],"iteration":839,"passed_time":0.6641587281,"remaining_time":0.1265064244}, -{"learn":[73.75377577],"iteration":840,"passed_time":0.6650179013,"remaining_time":0.1257287114}, 
-{"learn":[73.63426609],"iteration":841,"passed_time":0.6657815694,"remaining_time":0.1249328836}, -{"learn":[73.55942885],"iteration":842,"passed_time":0.6672051215,"remaining_time":0.1242600286}, -{"learn":[73.45438013],"iteration":843,"passed_time":0.6681559868,"remaining_time":0.123498026}, -{"learn":[73.2753604],"iteration":844,"passed_time":0.6690018166,"remaining_time":0.1227163095}, -{"learn":[73.10596468],"iteration":845,"passed_time":0.669853699,"remaining_time":0.1219355433}, -{"learn":[73.05334097],"iteration":846,"passed_time":0.6706752308,"remaining_time":0.1211491267}, -{"learn":[72.9262698],"iteration":847,"passed_time":0.6714689845,"remaining_time":0.1203576482}, -{"learn":[72.79142922],"iteration":848,"passed_time":0.672280738,"remaining_time":0.1195693657}, -{"learn":[72.66419711],"iteration":849,"passed_time":0.6731237482,"remaining_time":0.1187865438}, -{"learn":[72.46903723],"iteration":850,"passed_time":0.6741558469,"remaining_time":0.1180366876}, -{"learn":[72.34320795],"iteration":851,"passed_time":0.674995203,"remaining_time":0.1172526879}, -{"learn":[72.23244127],"iteration":852,"passed_time":0.675750342,"remaining_time":0.1164540449}, -{"learn":[72.20557751],"iteration":853,"passed_time":0.676586844,"remaining_time":0.1156694136}, -{"learn":[72.10773374],"iteration":854,"passed_time":0.6773857092,"remaining_time":0.1148782782}, -{"learn":[72.0656456],"iteration":855,"passed_time":0.6782271564,"remaining_time":0.114094288}, -{"learn":[71.9769982],"iteration":856,"passed_time":0.6790042911,"remaining_time":0.1132994325}, -{"learn":[71.8705715],"iteration":857,"passed_time":0.6797565169,"remaining_time":0.1125004958}, -{"learn":[71.79586444],"iteration":858,"passed_time":0.6805495615,"remaining_time":0.1117083681}, -{"learn":[71.73351722],"iteration":859,"passed_time":0.6813806989,"remaining_time":0.1109224393}, -{"learn":[71.71965344],"iteration":860,"passed_time":0.6843146625,"remaining_time":0.1104758863}, -{"learn":[71.66132076],"iteration":861,"passed_time":0.6853144471,"remaining_time":0.1097139138}, -{"learn":[71.55484646],"iteration":862,"passed_time":0.6867685104,"remaining_time":0.1090235063}, -{"learn":[71.53760399],"iteration":863,"passed_time":0.6875069268,"remaining_time":0.1082186829}, -{"learn":[71.50761043],"iteration":864,"passed_time":0.6881854401,"remaining_time":0.1074046641}, -{"learn":[71.36594699],"iteration":865,"passed_time":0.6889109724,"remaining_time":0.1065982336}, -{"learn":[71.19238423],"iteration":866,"passed_time":0.6896490046,"remaining_time":0.1057939073}, -{"learn":[71.17217622],"iteration":867,"passed_time":0.6903313609,"remaining_time":0.1049812669}, -{"learn":[71.07745294],"iteration":868,"passed_time":0.6910408358,"remaining_time":0.1041730144}, -{"learn":[70.97565875],"iteration":869,"passed_time":0.6917777274,"remaining_time":0.1033690857}, -{"learn":[70.89321102],"iteration":870,"passed_time":0.6924913438,"remaining_time":0.1025618638}, -{"learn":[70.85875957],"iteration":871,"passed_time":0.6932283828,"remaining_time":0.1017582947}, -{"learn":[70.75038623],"iteration":872,"passed_time":0.6939115379,"remaining_time":0.1009470393}, -{"learn":[70.6590808],"iteration":873,"passed_time":0.6946233456,"remaining_time":0.1001402077}, -{"learn":[70.58091606],"iteration":874,"passed_time":0.6953081779,"remaining_time":0.0993297397}, -{"learn":[70.44697908],"iteration":875,"passed_time":0.6959603075,"remaining_time":0.09851492938}, -{"learn":[70.36441182],"iteration":876,"passed_time":0.696792626,"remaining_time":0.09772576169}, 
-{"learn":[70.20024085],"iteration":877,"passed_time":0.6977678203,"remaining_time":0.09695634861}, -{"learn":[70.1359221],"iteration":878,"passed_time":0.6986643202,"remaining_time":0.09617563452}, -{"learn":[70.03250378],"iteration":879,"passed_time":0.6997948602,"remaining_time":0.09542657185}, -{"learn":[69.93876123],"iteration":880,"passed_time":0.7005115811,"remaining_time":0.09462074705}, -{"learn":[69.83200036],"iteration":881,"passed_time":0.7012154264,"remaining_time":0.09381340172}, -{"learn":[69.62171708],"iteration":882,"passed_time":0.7019161623,"remaining_time":0.0930058788}, -{"learn":[69.51234224],"iteration":883,"passed_time":0.7026312778,"remaining_time":0.09220048442}, -{"learn":[69.36816508],"iteration":884,"passed_time":0.7032897044,"remaining_time":0.09138792769}, -{"learn":[69.26007535],"iteration":885,"passed_time":0.7040017632,"remaining_time":0.09058261965}, -{"learn":[69.17361388],"iteration":886,"passed_time":0.7046900981,"remaining_time":0.08977449954}, -{"learn":[69.0210084],"iteration":887,"passed_time":0.7053875471,"remaining_time":0.08896779874}, -{"learn":[68.88465565],"iteration":888,"passed_time":0.7061038987,"remaining_time":0.08816370389}, -{"learn":[68.74682108],"iteration":889,"passed_time":0.7067979961,"remaining_time":0.0873570557}, -{"learn":[68.70150075],"iteration":890,"passed_time":0.7074994733,"remaining_time":0.08655156295}, -{"learn":[68.63792821],"iteration":891,"passed_time":0.7082703894,"remaining_time":0.08575471083}, -{"learn":[68.57925914],"iteration":892,"passed_time":0.7090276718,"remaining_time":0.08495628319}, -{"learn":[68.51565267],"iteration":893,"passed_time":0.709760808,"remaining_time":0.08415508461}, -{"learn":[68.42777149],"iteration":894,"passed_time":0.7104314136,"remaining_time":0.08334670215}, -{"learn":[68.39382409],"iteration":895,"passed_time":0.7111296328,"remaining_time":0.08254183238}, -{"learn":[68.30705352],"iteration":896,"passed_time":0.7120700904,"remaining_time":0.08176501595}, -{"learn":[68.1673343],"iteration":897,"passed_time":0.7129561074,"remaining_time":0.08098165139}, -{"learn":[68.13581243],"iteration":898,"passed_time":0.7137082921,"remaining_time":0.08018302281}, -{"learn":[68.01723987],"iteration":899,"passed_time":0.7144951883,"remaining_time":0.07938835426}, -{"learn":[67.94841632],"iteration":900,"passed_time":0.7154595924,"remaining_time":0.07861320715}, -{"learn":[67.83548288],"iteration":901,"passed_time":0.7161962682,"remaining_time":0.07781289831}, -{"learn":[67.75339725],"iteration":902,"passed_time":0.7169133915,"remaining_time":0.07701063009}, -{"learn":[67.61928604],"iteration":903,"passed_time":0.7176448507,"remaining_time":0.07621007264}, -{"learn":[67.47890298],"iteration":904,"passed_time":0.7183362581,"remaining_time":0.07540546355}, -{"learn":[67.40245754],"iteration":905,"passed_time":0.7190577685,"remaining_time":0.07460422764}, -{"learn":[67.25332785],"iteration":906,"passed_time":0.7197137811,"remaining_time":0.07379645165}, -{"learn":[67.18688177],"iteration":907,"passed_time":0.7204338041,"remaining_time":0.07299549557}, -{"learn":[67.12462282],"iteration":908,"passed_time":0.7211155593,"remaining_time":0.07219088658}, -{"learn":[67.09869793],"iteration":909,"passed_time":0.7218025624,"remaining_time":0.07138706661}, -{"learn":[66.92775555],"iteration":910,"passed_time":0.7225011901,"remaining_time":0.07058463877}, -{"learn":[66.79737891],"iteration":911,"passed_time":0.7231798604,"remaining_time":0.06978051285}, 
-{"learn":[66.61888136],"iteration":912,"passed_time":0.7238431731,"remaining_time":0.06897519831}, -{"learn":[66.52898648],"iteration":913,"passed_time":0.72457054,"remaining_time":0.06817622149}, -{"learn":[66.42846973],"iteration":914,"passed_time":0.7252475986,"remaining_time":0.06737272774}, -{"learn":[66.37536759],"iteration":915,"passed_time":0.725992162,"remaining_time":0.06657570045}, -{"learn":[66.33675846],"iteration":916,"passed_time":0.7267023506,"remaining_time":0.06577567623}, -{"learn":[66.18297625],"iteration":917,"passed_time":0.7274544107,"remaining_time":0.06497958789}, -{"learn":[66.05337085],"iteration":918,"passed_time":0.7286414421,"remaining_time":0.06422193342}, -{"learn":[65.98687591],"iteration":919,"passed_time":0.7294558679,"remaining_time":0.06343094504}, -{"learn":[65.97236793],"iteration":920,"passed_time":0.730226908,"remaining_time":0.06263618429}, -{"learn":[65.95830318],"iteration":921,"passed_time":0.7311186414,"remaining_time":0.0618516855}, -{"learn":[65.93802431],"iteration":922,"passed_time":0.7318333646,"remaining_time":0.06105218751}, -{"learn":[65.88563313],"iteration":923,"passed_time":0.7326164067,"remaining_time":0.06025849233}, -{"learn":[65.7892447],"iteration":924,"passed_time":0.7332844598,"remaining_time":0.05945549674}, -{"learn":[65.67058669],"iteration":925,"passed_time":0.7339738381,"remaining_time":0.05865449678}, -{"learn":[65.63703386],"iteration":926,"passed_time":0.7346531412,"remaining_time":0.05785294423}, -{"learn":[65.5087068],"iteration":927,"passed_time":0.7353080274,"remaining_time":0.05704976075}, -{"learn":[65.37954195],"iteration":928,"passed_time":0.7360054165,"remaining_time":0.05625014486}, -{"learn":[65.3183101],"iteration":929,"passed_time":0.7367071982,"remaining_time":0.05545107944}, -{"learn":[65.20737401],"iteration":930,"passed_time":0.7374434239,"remaining_time":0.05465477578}, -{"learn":[65.17874923],"iteration":931,"passed_time":0.7381870213,"remaining_time":0.05385913889}, -{"learn":[65.08987741],"iteration":932,"passed_time":0.738877287,"remaining_time":0.05305978374}, -{"learn":[64.99861268],"iteration":933,"passed_time":0.7395721656,"remaining_time":0.05226098815}, -{"learn":[64.87984454],"iteration":934,"passed_time":0.7403285973,"remaining_time":0.05146669393}, -{"learn":[64.80972424],"iteration":935,"passed_time":0.7409844963,"remaining_time":0.05066560658}, -{"learn":[64.73814004],"iteration":936,"passed_time":0.7417510555,"remaining_time":0.04987226947}, -{"learn":[64.65819097],"iteration":937,"passed_time":0.7424225699,"remaining_time":0.04907270718}, -{"learn":[64.5587909],"iteration":938,"passed_time":0.7434650618,"remaining_time":0.04829751733}, -{"learn":[64.44593228],"iteration":939,"passed_time":0.7442668153,"remaining_time":0.04750639247}, -{"learn":[64.39097823],"iteration":940,"passed_time":0.7450567587,"remaining_time":0.04671450453}, -{"learn":[64.27577767],"iteration":941,"passed_time":0.7462359348,"remaining_time":0.04594658622}, -{"learn":[64.15712342],"iteration":942,"passed_time":0.7470644316,"remaining_time":0.04515659873}, -{"learn":[64.06704534],"iteration":943,"passed_time":0.7477343568,"remaining_time":0.04435712286}, -{"learn":[64.04056429],"iteration":944,"passed_time":0.7484645373,"remaining_time":0.04356142809}, -{"learn":[63.9472476],"iteration":945,"passed_time":0.7491311549,"remaining_time":0.04276224351}, -{"learn":[63.8379816],"iteration":946,"passed_time":0.7498115534,"remaining_time":0.04196411017}, 
-{"learn":[63.72808541],"iteration":947,"passed_time":0.7505355772,"remaining_time":0.04116861816}, -{"learn":[63.63637843],"iteration":948,"passed_time":0.7512215522,"remaining_time":0.040371232}, -{"learn":[63.57742148],"iteration":949,"passed_time":0.7518819121,"remaining_time":0.03957273221}, -{"learn":[63.512264],"iteration":950,"passed_time":0.7526075765,"remaining_time":0.03877788775}, -{"learn":[63.36761033],"iteration":951,"passed_time":0.7532844143,"remaining_time":0.03798072677}, -{"learn":[63.30219425],"iteration":952,"passed_time":0.7539743267,"remaining_time":0.03718446312}, -{"learn":[63.27483452],"iteration":953,"passed_time":0.7546193261,"remaining_time":0.03638625681}, -{"learn":[63.23222472],"iteration":954,"passed_time":0.7557337254,"remaining_time":0.03561048968}, -{"learn":[63.16959836],"iteration":955,"passed_time":0.7564469042,"remaining_time":0.03481554789}, -{"learn":[63.05808935],"iteration":956,"passed_time":0.7572112737,"remaining_time":0.03402307708}, -{"learn":[62.99698818],"iteration":957,"passed_time":0.7579507075,"remaining_time":0.03322957173}, -{"learn":[62.89499542],"iteration":958,"passed_time":0.7590476746,"remaining_time":0.03245146471}, -{"learn":[62.85488529],"iteration":959,"passed_time":0.7599028588,"remaining_time":0.03166261911}, -{"learn":[62.75814145],"iteration":960,"passed_time":0.7607528104,"remaining_time":0.03087342311}, -{"learn":[62.67031583],"iteration":961,"passed_time":0.7616074298,"remaining_time":0.03008428517}, -{"learn":[62.59874788],"iteration":962,"passed_time":0.7623402754,"remaining_time":0.02929033249}, -{"learn":[62.54413822],"iteration":963,"passed_time":0.7630292648,"remaining_time":0.02849486881}, -{"learn":[62.45073587],"iteration":964,"passed_time":0.7637141729,"remaining_time":0.02769947777}, -{"learn":[62.3819076],"iteration":965,"passed_time":0.7643869835,"remaining_time":0.02690388969}, -{"learn":[62.34907098],"iteration":966,"passed_time":0.7650612485,"remaining_time":0.02610860517}, -{"learn":[62.25972654],"iteration":967,"passed_time":0.7657204321,"remaining_time":0.02531307214}, -{"learn":[62.20674847],"iteration":968,"passed_time":0.7663664432,"remaining_time":0.02451739911}, -{"learn":[62.16446083],"iteration":969,"passed_time":0.7670603849,"remaining_time":0.02372351706}, -{"learn":[62.08235998],"iteration":970,"passed_time":0.7677608686,"remaining_time":0.02293003624}, -{"learn":[61.96334733],"iteration":971,"passed_time":0.7685030213,"remaining_time":0.02213794712}, -{"learn":[61.88442894],"iteration":972,"passed_time":0.769243657,"remaining_time":0.02134591854}, -{"learn":[61.79490922],"iteration":973,"passed_time":0.7699819188,"remaining_time":0.02055393213}, -{"learn":[61.64128691],"iteration":974,"passed_time":0.7706759824,"remaining_time":0.01976092263}, -{"learn":[61.55177583],"iteration":975,"passed_time":0.7713573601,"remaining_time":0.01896780394}, -{"learn":[61.51457543],"iteration":976,"passed_time":0.7720570714,"remaining_time":0.01817534559}, -{"learn":[61.42560945],"iteration":977,"passed_time":0.7727304475,"remaining_time":0.0173824845}, -{"learn":[61.40207171],"iteration":978,"passed_time":0.7739470789,"remaining_time":0.01660152059}, -{"learn":[61.30660088],"iteration":979,"passed_time":0.7748018265,"remaining_time":0.01581228217}, -{"learn":[61.268583],"iteration":980,"passed_time":0.7756881515,"remaining_time":0.01502352179}, -{"learn":[61.22934796],"iteration":981,"passed_time":0.7765191894,"remaining_time":0.0142335493}, 
-{"learn":[61.20616682],"iteration":982,"passed_time":0.7772627708,"remaining_time":0.01344198078}, -{"learn":[61.08147013],"iteration":983,"passed_time":0.7779311251,"remaining_time":0.01264928659}, -{"learn":[61.05608199],"iteration":984,"passed_time":0.7786545169,"remaining_time":0.011857683}, -{"learn":[61.0355597],"iteration":985,"passed_time":0.7793449624,"remaining_time":0.01106574997}, -{"learn":[61.00908742],"iteration":986,"passed_time":0.7800460682,"remaining_time":0.01027416301}, -{"learn":[60.92877525],"iteration":987,"passed_time":0.7807541059,"remaining_time":0.009482843392}, -{"learn":[60.85971462],"iteration":988,"passed_time":0.7814405784,"remaining_time":0.008691452338}, -{"learn":[60.74474777],"iteration":989,"passed_time":0.7821448923,"remaining_time":0.007900453458}, -{"learn":[60.62859732],"iteration":990,"passed_time":0.7828729334,"remaining_time":0.007109845005}, -{"learn":[60.53948158],"iteration":991,"passed_time":0.7835951532,"remaining_time":0.006319315752}, -{"learn":[60.47245961],"iteration":992,"passed_time":0.7843303191,"remaining_time":0.005529015341}, -{"learn":[60.3302556],"iteration":993,"passed_time":0.7850193399,"remaining_time":0.004738547323}, -{"learn":[60.24396727],"iteration":994,"passed_time":0.7857177643,"remaining_time":0.003948330474}, -{"learn":[60.1186116],"iteration":995,"passed_time":0.7864366727,"remaining_time":0.003158380212}, -{"learn":[60.05543163],"iteration":996,"passed_time":0.7871567627,"remaining_time":0.002368576016}, -{"learn":[60.02251974],"iteration":997,"passed_time":0.7878706109,"remaining_time":0.00157889902}, -{"learn":[59.94975844],"iteration":998,"passed_time":0.7885718143,"remaining_time":0.0007893611754}, -{"learn":[59.92150919],"iteration":999,"passed_time":0.7896422748,"remaining_time":0} +{"learn":[3606.387858],"iteration":0,"passed_time":0.00102296608,"remaining_time":1.021943114}, +{"learn":[3518.319482],"iteration":1,"passed_time":0.001998732645,"remaining_time":0.9973675897}, +{"learn":[3419.905128],"iteration":2,"passed_time":0.002988567905,"remaining_time":0.9932007339}, +{"learn":[3332.348887],"iteration":3,"passed_time":0.003886783049,"remaining_time":0.9678089793}, +{"learn":[3241.127893],"iteration":4,"passed_time":0.004792898726,"remaining_time":0.9537868465}, +{"learn":[3157.197799],"iteration":5,"passed_time":0.005502921871,"remaining_time":0.9116507234}, +{"learn":[3084.268305],"iteration":6,"passed_time":0.006379744293,"remaining_time":0.9050122976}, +{"learn":[2999.831771],"iteration":7,"passed_time":0.007226196649,"remaining_time":0.8960483845}, +{"learn":[2922.61658],"iteration":8,"passed_time":0.007970444161,"remaining_time":0.8776344626}, +{"learn":[2850.066213],"iteration":9,"passed_time":0.009577961727,"remaining_time":0.948218211}, +{"learn":[2787.171725],"iteration":10,"passed_time":0.01262603521,"remaining_time":1.135195347}, +{"learn":[2712.631573],"iteration":11,"passed_time":0.01348774025,"remaining_time":1.110490614}, +{"learn":[2639.240862],"iteration":12,"passed_time":0.01435758583,"remaining_time":1.090072094}, +{"learn":[2580.881397],"iteration":13,"passed_time":0.01514015736,"remaining_time":1.066299654}, +{"learn":[2521.58662],"iteration":14,"passed_time":0.01593945222,"remaining_time":1.046690696}, +{"learn":[2464.607771],"iteration":15,"passed_time":0.01679922513,"remaining_time":1.033152345}, +{"learn":[2401.172352],"iteration":16,"passed_time":0.01760465147,"remaining_time":1.017963082}, 
+{"learn":[2342.65902],"iteration":17,"passed_time":0.01895445613,"remaining_time":1.034070884}, +{"learn":[2289.300237],"iteration":18,"passed_time":0.01983724873,"remaining_time":1.024228474}, +{"learn":[2235.975037],"iteration":19,"passed_time":0.02062393627,"remaining_time":1.010572877}, +{"learn":[2178.182547],"iteration":20,"passed_time":0.02148322277,"remaining_time":1.001527385}, +{"learn":[2125.421774],"iteration":21,"passed_time":0.02238767694,"remaining_time":0.995234002}, +{"learn":[2077.423716],"iteration":22,"passed_time":0.02325785973,"remaining_time":0.9879534331}, +{"learn":[2036.067952],"iteration":23,"passed_time":0.02491905798,"remaining_time":1.013375025}, +{"learn":[1991.236961],"iteration":24,"passed_time":0.02586931936,"remaining_time":1.008903455}, +{"learn":[1947.239582],"iteration":25,"passed_time":0.02797841234,"remaining_time":1.04811437}, +{"learn":[1898.356448],"iteration":26,"passed_time":0.02918941018,"remaining_time":1.051899856}, +{"learn":[1852.598884],"iteration":27,"passed_time":0.03012027329,"remaining_time":1.045603773}, +{"learn":[1805.98091],"iteration":28,"passed_time":0.0309115501,"remaining_time":1.035003971}, +{"learn":[1769.275215],"iteration":29,"passed_time":0.03213546825,"remaining_time":1.039046807}, +{"learn":[1732.261276],"iteration":30,"passed_time":0.03309363693,"remaining_time":1.034443038}, +{"learn":[1698.06551],"iteration":31,"passed_time":0.0342655676,"remaining_time":1.03653342}, +{"learn":[1662.095506],"iteration":32,"passed_time":0.03501587146,"remaining_time":1.026071143}, +{"learn":[1625.53386],"iteration":33,"passed_time":0.03763920806,"remaining_time":1.069396323}, +{"learn":[1589.035018],"iteration":34,"passed_time":0.03865344926,"remaining_time":1.065730815}, +{"learn":[1557.617707],"iteration":35,"passed_time":0.03933677219,"remaining_time":1.053351344}, +{"learn":[1527.889061],"iteration":36,"passed_time":0.04006173858,"remaining_time":1.042687953}, +{"learn":[1493.967086],"iteration":37,"passed_time":0.04122821077,"remaining_time":1.043724704}, +{"learn":[1465.168414],"iteration":38,"passed_time":0.04274831007,"remaining_time":1.053362204}, +{"learn":[1436.754665],"iteration":39,"passed_time":0.04507089726,"remaining_time":1.081701534}, +{"learn":[1409.060957],"iteration":40,"passed_time":0.04617657413,"remaining_time":1.080081331}, +{"learn":[1383.04592],"iteration":41,"passed_time":0.04712081619,"remaining_time":1.074803379}, +{"learn":[1352.152048],"iteration":42,"passed_time":0.04814847017,"remaining_time":1.071583394}, +{"learn":[1326.002392],"iteration":43,"passed_time":0.04912743904,"remaining_time":1.067405266}, +{"learn":[1298.795677],"iteration":44,"passed_time":0.05020640002,"remaining_time":1.065491378}, +{"learn":[1277.59462],"iteration":45,"passed_time":0.05115124636,"remaining_time":1.06083237}, +{"learn":[1253.978103],"iteration":46,"passed_time":0.05201088787,"remaining_time":1.054603748}, +{"learn":[1230.190699],"iteration":47,"passed_time":0.05301418505,"remaining_time":1.051448004}, +{"learn":[1203.967689],"iteration":48,"passed_time":0.05393893128,"remaining_time":1.046855585}, +{"learn":[1182.19517],"iteration":49,"passed_time":0.05471787462,"remaining_time":1.039639618}, +{"learn":[1164.258966],"iteration":50,"passed_time":0.05596917062,"remaining_time":1.041465547}, +{"learn":[1141.966488],"iteration":51,"passed_time":0.05831283469,"remaining_time":1.063087832}, +{"learn":[1120.815492],"iteration":52,"passed_time":0.05966087136,"remaining_time":1.066015947}, 
+{"learn":[1101.092758],"iteration":53,"passed_time":0.06060035252,"remaining_time":1.061628398}, +{"learn":[1085.964328],"iteration":54,"passed_time":0.06151738373,"remaining_time":1.056980502}, +{"learn":[1068.29696],"iteration":55,"passed_time":0.06252420369,"remaining_time":1.053979434}, +{"learn":[1050.669356],"iteration":56,"passed_time":0.06359205614,"remaining_time":1.052058052}, +{"learn":[1034.918083],"iteration":57,"passed_time":0.06452150985,"remaining_time":1.047918315}, +{"learn":[1015.911816],"iteration":58,"passed_time":0.06746622867,"remaining_time":1.076029173}, +{"learn":[996.1684752],"iteration":59,"passed_time":0.06828252391,"remaining_time":1.069759541}, +{"learn":[977.4736183],"iteration":60,"passed_time":0.06901458572,"remaining_time":1.062372065}, +{"learn":[961.5096755],"iteration":61,"passed_time":0.06977962986,"remaining_time":1.055698271}, +{"learn":[944.211636],"iteration":62,"passed_time":0.07093982136,"remaining_time":1.055089089}, +{"learn":[928.0566653],"iteration":63,"passed_time":0.07192675486,"remaining_time":1.05192879}, +{"learn":[915.6840108],"iteration":64,"passed_time":0.07346848545,"remaining_time":1.056815906}, +{"learn":[898.1121629],"iteration":65,"passed_time":0.0750791106,"remaining_time":1.062483171}, +{"learn":[882.9822656],"iteration":66,"passed_time":0.07619541706,"remaining_time":1.061049614}, +{"learn":[873.9766077],"iteration":67,"passed_time":0.07720082905,"remaining_time":1.058105481}, +{"learn":[859.858408],"iteration":68,"passed_time":0.07831632826,"remaining_time":1.056702922}, +{"learn":[843.5547062],"iteration":69,"passed_time":0.07936070629,"remaining_time":1.054363669}, +{"learn":[831.306257],"iteration":70,"passed_time":0.08027405021,"remaining_time":1.050346375}, +{"learn":[820.3099516],"iteration":71,"passed_time":0.08105374347,"remaining_time":1.044692694}, +{"learn":[807.1855686],"iteration":72,"passed_time":0.0818083884,"remaining_time":1.038854466}, +{"learn":[794.169211],"iteration":73,"passed_time":0.08263883886,"remaining_time":1.034102227}, +{"learn":[783.9636837],"iteration":74,"passed_time":0.08348626085,"remaining_time":1.029663884}, +{"learn":[776.4972272],"iteration":75,"passed_time":0.08414387027,"remaining_time":1.023012318}, +{"learn":[767.6959848],"iteration":76,"passed_time":0.08507209512,"remaining_time":1.019760309}, +{"learn":[755.1965137],"iteration":77,"passed_time":0.08599871366,"remaining_time":1.016548897}, +{"learn":[744.4263707],"iteration":78,"passed_time":0.08686327133,"remaining_time":1.012671809}, +{"learn":[737.761998],"iteration":79,"passed_time":0.08896217128,"remaining_time":1.02306497}, +{"learn":[729.7130391],"iteration":80,"passed_time":0.09008381336,"remaining_time":1.022062031}, +{"learn":[722.3074503],"iteration":81,"passed_time":0.09101888152,"remaining_time":1.018967478}, +{"learn":[716.8086447],"iteration":82,"passed_time":0.09379876739,"remaining_time":1.036306864}, +{"learn":[709.6708335],"iteration":83,"passed_time":0.09504194422,"remaining_time":1.036409773}, +{"learn":[699.0944538],"iteration":84,"passed_time":0.09583257759,"remaining_time":1.031609512}, +{"learn":[693.2289681],"iteration":85,"passed_time":0.09661009159,"remaining_time":1.026763066}, +{"learn":[685.4228057],"iteration":86,"passed_time":0.09729419862,"remaining_time":1.021029923}, +{"learn":[676.9562967],"iteration":87,"passed_time":0.09799564324,"remaining_time":1.015591212}, +{"learn":[670.2366575],"iteration":88,"passed_time":0.09864680426,"remaining_time":1.009744255}, 
+{"learn":[666.4112163],"iteration":89,"passed_time":0.09931420078,"remaining_time":1.004176919}, +{"learn":[659.6851751],"iteration":90,"passed_time":0.09997397808,"remaining_time":0.9986411656}, +{"learn":[650.5406821],"iteration":91,"passed_time":0.1006937071,"remaining_time":0.9938031094}, +{"learn":[642.437192],"iteration":92,"passed_time":0.1014564082,"remaining_time":0.9894727122}, +{"learn":[637.5808176],"iteration":93,"passed_time":0.1022581807,"remaining_time":0.9855948057}, +{"learn":[630.4765451],"iteration":94,"passed_time":0.1041053803,"remaining_time":0.9917407282}, +{"learn":[626.0489731],"iteration":95,"passed_time":0.1053464477,"remaining_time":0.9920123823}, +{"learn":[621.6103651],"iteration":96,"passed_time":0.1061802756,"remaining_time":0.9884617412}, +{"learn":[614.0547972],"iteration":97,"passed_time":0.1069295198,"remaining_time":0.984188029}, +{"learn":[607.9423586],"iteration":98,"passed_time":0.1077968581,"remaining_time":0.9810602948}, +{"learn":[600.4874848],"iteration":99,"passed_time":0.109047333,"remaining_time":0.9814259971}, +{"learn":[592.9412018],"iteration":100,"passed_time":0.1099874466,"remaining_time":0.978997173}, +{"learn":[588.1187919],"iteration":101,"passed_time":0.1107679158,"remaining_time":0.9751920426}, +{"learn":[581.0641089],"iteration":102,"passed_time":0.1116909634,"remaining_time":0.9726873224}, +{"learn":[574.2759513],"iteration":103,"passed_time":0.1124788766,"remaining_time":0.9690487834}, +{"learn":[569.9720728],"iteration":104,"passed_time":0.1130825786,"remaining_time":0.9638943601}, +{"learn":[566.190281],"iteration":105,"passed_time":0.1136667198,"remaining_time":0.9586608258}, +{"learn":[562.1953603],"iteration":106,"passed_time":0.1143580661,"remaining_time":0.9544089072}, +{"learn":[557.4002281],"iteration":107,"passed_time":0.1150058109,"remaining_time":0.9498628082}, +{"learn":[553.1702731],"iteration":108,"passed_time":0.1156636621,"remaining_time":0.9454708523}, +{"learn":[548.5889262],"iteration":109,"passed_time":0.1163139026,"remaining_time":0.941085212}, +{"learn":[543.4056498],"iteration":110,"passed_time":0.1170816865,"remaining_time":0.9377082819}, +{"learn":[538.4832325],"iteration":111,"passed_time":0.1181059918,"remaining_time":0.9364117923}, +{"learn":[535.2458573],"iteration":112,"passed_time":0.1202052731,"remaining_time":0.943558206}, +{"learn":[530.1384428],"iteration":113,"passed_time":0.1212157662,"remaining_time":0.9420804282}, +{"learn":[525.1436712],"iteration":114,"passed_time":0.1220923009,"remaining_time":0.9395798805}, +{"learn":[520.6876088],"iteration":115,"passed_time":0.1237231465,"remaining_time":0.9428557025}, +{"learn":[514.513666],"iteration":116,"passed_time":0.1245349917,"remaining_time":0.9398666466}, +{"learn":[509.7961508],"iteration":117,"passed_time":0.1253572738,"remaining_time":0.9369925043}, +{"learn":[506.6708641],"iteration":118,"passed_time":0.1261668066,"remaining_time":0.9340584588}, +{"learn":[502.5185268],"iteration":119,"passed_time":0.1268687134,"remaining_time":0.930370565}, +{"learn":[499.116907],"iteration":120,"passed_time":0.1275570721,"remaining_time":0.9266336066}, +{"learn":[494.894429],"iteration":121,"passed_time":0.1284537917,"remaining_time":0.9244461403}, +{"learn":[490.7216202],"iteration":122,"passed_time":0.1292312922,"remaining_time":0.9214296198}, +{"learn":[486.660548],"iteration":123,"passed_time":0.1298296745,"remaining_time":0.9171838294}, +{"learn":[484.8822804],"iteration":124,"passed_time":0.1304269163,"remaining_time":0.9129884138}, 
+{"learn":[480.6495083],"iteration":125,"passed_time":0.1311208787,"remaining_time":0.909521016}, +{"learn":[476.774821],"iteration":126,"passed_time":0.131746984,"remaining_time":0.9056308425}, +{"learn":[472.7333979],"iteration":127,"passed_time":0.1325146699,"remaining_time":0.9027561889}, +{"learn":[468.6643234],"iteration":128,"passed_time":0.1336600157,"remaining_time":0.9024641373}, +{"learn":[464.4447997],"iteration":129,"passed_time":0.1344089811,"remaining_time":0.899506258}, +{"learn":[460.6841863],"iteration":130,"passed_time":0.1368505171,"remaining_time":0.9078099188}, +{"learn":[456.2935225],"iteration":131,"passed_time":0.1376144517,"remaining_time":0.9049192731}, +{"learn":[453.1949553],"iteration":132,"passed_time":0.1384330625,"remaining_time":0.9024170318}, +{"learn":[450.8314019],"iteration":133,"passed_time":0.1392760705,"remaining_time":0.9000975897}, +{"learn":[447.9792497],"iteration":134,"passed_time":0.1401923834,"remaining_time":0.8982697161}, +{"learn":[444.1524308],"iteration":135,"passed_time":0.140999587,"remaining_time":0.8957620823}, +{"learn":[440.3004227],"iteration":136,"passed_time":0.1418121843,"remaining_time":0.8933132484}, +{"learn":[437.5089858],"iteration":137,"passed_time":0.143176518,"remaining_time":0.8943344823}, +{"learn":[434.4987658],"iteration":138,"passed_time":0.14393554,"remaining_time":0.8915719421}, +{"learn":[431.333326],"iteration":139,"passed_time":0.144626411,"remaining_time":0.8884193816}, +{"learn":[429.2145835],"iteration":140,"passed_time":0.1453053558,"remaining_time":0.8852290823}, +{"learn":[426.2960991],"iteration":141,"passed_time":0.1461218127,"remaining_time":0.8829050371}, +{"learn":[423.9875382],"iteration":142,"passed_time":0.1467827661,"remaining_time":0.8796701438}, +{"learn":[422.3177707],"iteration":143,"passed_time":0.1474829391,"remaining_time":0.8767041383}, +{"learn":[421.2376606],"iteration":144,"passed_time":0.1488152282,"remaining_time":0.8774966902}, +{"learn":[417.7776491],"iteration":145,"passed_time":0.1496361704,"remaining_time":0.875269106}, +{"learn":[416.1531592],"iteration":146,"passed_time":0.1507895577,"remaining_time":0.8749897465}, +{"learn":[412.1591115],"iteration":147,"passed_time":0.1517581139,"remaining_time":0.8736345473}, +{"learn":[409.1819197],"iteration":148,"passed_time":0.1525729274,"remaining_time":0.8714064511}, +{"learn":[406.5238175],"iteration":149,"passed_time":0.1534094862,"remaining_time":0.8693204218}, +{"learn":[404.1942423],"iteration":150,"passed_time":0.1544202217,"remaining_time":0.8682302532}, +{"learn":[400.2960048],"iteration":151,"passed_time":0.1552089037,"remaining_time":0.8659023049}, +{"learn":[398.2802551],"iteration":152,"passed_time":0.1559438698,"remaining_time":0.8632971091}, +{"learn":[395.9698537],"iteration":153,"passed_time":0.1569355442,"remaining_time":0.8621264313}, +{"learn":[393.9176761],"iteration":154,"passed_time":0.1578067928,"remaining_time":0.8603015479}, +{"learn":[390.6531762],"iteration":155,"passed_time":0.1585002058,"remaining_time":0.8575267547}, +{"learn":[387.9927685],"iteration":156,"passed_time":0.1591401561,"remaining_time":0.8544914116}, +{"learn":[386.1000681],"iteration":157,"passed_time":0.1598188221,"remaining_time":0.8516927104}, +{"learn":[382.8113362],"iteration":158,"passed_time":0.16056235,"remaining_time":0.8492637506}, +{"learn":[379.6496615],"iteration":159,"passed_time":0.1612830797,"remaining_time":0.8467361682}, +{"learn":[378.3378875],"iteration":160,"passed_time":0.1619334872,"remaining_time":0.8438645699}, 
+{"learn":[376.0755551],"iteration":161,"passed_time":0.1626037794,"remaining_time":0.8411232541}, +{"learn":[374.0972576],"iteration":162,"passed_time":0.1640452045,"remaining_time":0.8423670933}, +{"learn":[371.8931312],"iteration":163,"passed_time":0.1648493675,"remaining_time":0.8403297026}, +{"learn":[368.0549992],"iteration":164,"passed_time":0.1686946552,"remaining_time":0.8536971945}, +{"learn":[366.6124142],"iteration":165,"passed_time":0.1697738939,"remaining_time":0.8529604069}, +{"learn":[364.8857903],"iteration":166,"passed_time":0.1705805271,"remaining_time":0.8508597547}, +{"learn":[362.2252152],"iteration":167,"passed_time":0.1714434036,"remaining_time":0.8490530466}, +{"learn":[360.9085148],"iteration":168,"passed_time":0.1721592389,"remaining_time":0.8465344822}, +{"learn":[358.6451278],"iteration":169,"passed_time":0.1728521188,"remaining_time":0.8439250508}, +{"learn":[355.0984419],"iteration":170,"passed_time":0.173549607,"remaining_time":0.8413603753}, +{"learn":[354.0569022],"iteration":171,"passed_time":0.1742203571,"remaining_time":0.8386886959}, +{"learn":[351.6524348],"iteration":172,"passed_time":0.1749847973,"remaining_time":0.8364880196}, +{"learn":[349.2576632],"iteration":173,"passed_time":0.1756557277,"remaining_time":0.8338599486}, +{"learn":[347.7399362],"iteration":174,"passed_time":0.1762844384,"remaining_time":0.8310552096}, +{"learn":[346.3216673],"iteration":175,"passed_time":0.1769929656,"remaining_time":0.8286488844}, +{"learn":[345.3573698],"iteration":176,"passed_time":0.1776997615,"remaining_time":0.8262536934}, +{"learn":[344.3890702],"iteration":177,"passed_time":0.1783708311,"remaining_time":0.8237124897}, +{"learn":[342.0730746],"iteration":178,"passed_time":0.1791248675,"remaining_time":0.8215727162}, +{"learn":[340.2167814],"iteration":179,"passed_time":0.1800660735,"remaining_time":0.8203010016}, +{"learn":[338.6962004],"iteration":180,"passed_time":0.1810218012,"remaining_time":0.8190986475}, +{"learn":[337.5122711],"iteration":181,"passed_time":0.1841910726,"remaining_time":0.8278477878}, +{"learn":[334.5157453],"iteration":182,"passed_time":0.1849794569,"remaining_time":0.8258372474}, +{"learn":[333.3058789],"iteration":183,"passed_time":0.1860134062,"remaining_time":0.8249290186}, +{"learn":[332.8028753],"iteration":184,"passed_time":0.1868111823,"remaining_time":0.8229789923}, +{"learn":[331.0744585],"iteration":185,"passed_time":0.1876568189,"remaining_time":0.8212508094}, +{"learn":[329.8548976],"iteration":186,"passed_time":0.1889095958,"remaining_time":0.8213021466}, +{"learn":[327.869795],"iteration":187,"passed_time":0.1895959886,"remaining_time":0.8188933125}, +{"learn":[326.5184806],"iteration":188,"passed_time":0.1902634674,"remaining_time":0.8164215452}, +{"learn":[325.4357906],"iteration":189,"passed_time":0.1909502162,"remaining_time":0.8140509218}, +{"learn":[323.3430059],"iteration":190,"passed_time":0.1916044866,"remaining_time":0.8115603648}, +{"learn":[321.5208687],"iteration":191,"passed_time":0.1923381188,"remaining_time":0.8094229165}, +{"learn":[319.5760156],"iteration":192,"passed_time":0.1930210732,"remaining_time":0.8070881142}, +{"learn":[317.7506047],"iteration":193,"passed_time":0.1936652378,"remaining_time":0.8046091836}, +{"learn":[316.1839605],"iteration":194,"passed_time":0.1951399717,"remaining_time":0.8055778321}, +{"learn":[314.198558],"iteration":195,"passed_time":0.1958938176,"remaining_time":0.8035644355}, 
+{"learn":[312.3917238],"iteration":196,"passed_time":0.1986409156,"remaining_time":0.8096886051}, +{"learn":[311.3355559],"iteration":197,"passed_time":0.1995943533,"remaining_time":0.8084579359}, +{"learn":[310.3982421],"iteration":198,"passed_time":0.2004397238,"remaining_time":0.8067950693}, +{"learn":[308.6559812],"iteration":199,"passed_time":0.201392851,"remaining_time":0.805571404}, +{"learn":[307.3273653],"iteration":200,"passed_time":0.2021893442,"remaining_time":0.8037277909}, +{"learn":[305.8939696],"iteration":201,"passed_time":0.2029950055,"remaining_time":0.8019307644}, +{"learn":[303.8927472],"iteration":202,"passed_time":0.2049108604,"remaining_time":0.8045022449}, +{"learn":[302.7532426],"iteration":203,"passed_time":0.2057360397,"remaining_time":0.8027739586}, +{"learn":[301.1496647],"iteration":204,"passed_time":0.2064110995,"remaining_time":0.8004723129}, +{"learn":[300.1434351],"iteration":205,"passed_time":0.2070538676,"remaining_time":0.7980619944}, +{"learn":[299.3006517],"iteration":206,"passed_time":0.2076637145,"remaining_time":0.7955426358}, +{"learn":[298.4622687],"iteration":207,"passed_time":0.2083234234,"remaining_time":0.7932314969}, +{"learn":[297.1605991],"iteration":208,"passed_time":0.2089827994,"remaining_time":0.7909349011}, +{"learn":[295.8648262],"iteration":209,"passed_time":0.2096593478,"remaining_time":0.7887184987}, +{"learn":[293.5810869],"iteration":210,"passed_time":0.2109301784,"remaining_time":0.7887389136}, +{"learn":[292.8127275],"iteration":211,"passed_time":0.2117513513,"remaining_time":0.7870757776}, +{"learn":[292.0236736],"iteration":212,"passed_time":0.2134172136,"remaining_time":0.7885415358}, +{"learn":[290.2514839],"iteration":213,"passed_time":0.21481168,"remaining_time":0.7889812171}, +{"learn":[288.2243426],"iteration":214,"passed_time":0.2159664692,"remaining_time":0.7885287366}, +{"learn":[286.8670898],"iteration":215,"passed_time":0.2169127129,"remaining_time":0.7873128097}, +{"learn":[285.5220573],"iteration":216,"passed_time":0.2191779599,"remaining_time":0.7908587216}, +{"learn":[283.7910197],"iteration":217,"passed_time":0.2201545739,"remaining_time":0.7897287927}, +{"learn":[282.2997544],"iteration":218,"passed_time":0.220898215,"remaining_time":0.7877694335}, +{"learn":[280.8603639],"iteration":219,"passed_time":0.2218471767,"remaining_time":0.786549081}, +{"learn":[279.7272082],"iteration":220,"passed_time":0.2227049188,"remaining_time":0.785009646}, +{"learn":[278.1697266],"iteration":221,"passed_time":0.2235466932,"remaining_time":0.7834203934}, +{"learn":[276.9547641],"iteration":222,"passed_time":0.2243624643,"remaining_time":0.7817472411}, +{"learn":[275.9620385],"iteration":223,"passed_time":0.2252452605,"remaining_time":0.7803139381}, +{"learn":[274.6320485],"iteration":224,"passed_time":0.2271930411,"remaining_time":0.7825538084}, +{"learn":[273.6185828],"iteration":225,"passed_time":0.2289905386,"remaining_time":0.7842419332}, +{"learn":[272.4580245],"iteration":226,"passed_time":0.2298270657,"remaining_time":0.7826269683}, +{"learn":[271.4426343],"iteration":227,"passed_time":0.2306176122,"remaining_time":0.7808631431}, +{"learn":[270.7657401],"iteration":228,"passed_time":0.2314688287,"remaining_time":0.7793120826}, +{"learn":[269.4458688],"iteration":229,"passed_time":0.2323498205,"remaining_time":0.7778667905}, +{"learn":[268.5788803],"iteration":230,"passed_time":0.2331506929,"remaining_time":0.7761596659}, 
+{"learn":[267.0961414],"iteration":231,"passed_time":0.2340473562,"remaining_time":0.7747774549}, +{"learn":[266.0051701],"iteration":232,"passed_time":0.2348947372,"remaining_time":0.7732371821}, +{"learn":[264.5156592],"iteration":233,"passed_time":0.2355969496,"remaining_time":0.7712276212}, +{"learn":[263.1693637],"iteration":234,"passed_time":0.2363747637,"remaining_time":0.7694752947}, +{"learn":[262.2827066],"iteration":235,"passed_time":0.23712222,"remaining_time":0.7676329494}, +{"learn":[261.9286628],"iteration":236,"passed_time":0.2378122341,"remaining_time":0.7656149141}, +{"learn":[261.482238],"iteration":237,"passed_time":0.2385981533,"remaining_time":0.7639150957}, +{"learn":[260.3457298],"iteration":238,"passed_time":0.2394350728,"remaining_time":0.7623853154}, +{"learn":[259.7332775],"iteration":239,"passed_time":0.2401456678,"remaining_time":0.7604612813}, +{"learn":[259.3165229],"iteration":240,"passed_time":0.2418544466,"remaining_time":0.7616909749}, +{"learn":[258.7312612],"iteration":241,"passed_time":0.2431756794,"remaining_time":0.7616824999}, +{"learn":[257.2398668],"iteration":242,"passed_time":0.2454229297,"remaining_time":0.7645479744}, +{"learn":[256.7958772],"iteration":243,"passed_time":0.2462483568,"remaining_time":0.7629662203}, +{"learn":[256.0899879],"iteration":244,"passed_time":0.2470613835,"remaining_time":0.7613524267}, +{"learn":[255.4514517],"iteration":245,"passed_time":0.2478312437,"remaining_time":0.7596128365}, +{"learn":[253.7741395],"iteration":246,"passed_time":0.2486206685,"remaining_time":0.7579407426}, +{"learn":[253.4455383],"iteration":247,"passed_time":0.2495069757,"remaining_time":0.7565695393}, +{"learn":[252.1779053],"iteration":248,"passed_time":0.2513492082,"remaining_time":0.758085363}, +{"learn":[251.5443517],"iteration":249,"passed_time":0.2521588332,"remaining_time":0.7564764997}, +{"learn":[250.6149484],"iteration":250,"passed_time":0.2528892022,"remaining_time":0.7546374998}, +{"learn":[250.0460461],"iteration":251,"passed_time":0.2536917627,"remaining_time":0.7530215815}, +{"learn":[249.2557283],"iteration":252,"passed_time":0.2545085832,"remaining_time":0.7514541963}, +{"learn":[248.5946517],"iteration":253,"passed_time":0.2554606556,"remaining_time":0.7502899571}, +{"learn":[247.3165644],"iteration":254,"passed_time":0.2564481147,"remaining_time":0.7492307666}, +{"learn":[246.0656813],"iteration":255,"passed_time":0.2581274571,"remaining_time":0.7501829222}, +{"learn":[245.4258742],"iteration":256,"passed_time":0.2593826177,"remaining_time":0.7498882684}, +{"learn":[244.8982327],"iteration":257,"passed_time":0.2605823051,"remaining_time":0.7494266295}, +{"learn":[244.051275],"iteration":258,"passed_time":0.2613910876,"remaining_time":0.7478409109}, +{"learn":[243.1550562],"iteration":259,"passed_time":0.2621577772,"remaining_time":0.746141366}, +{"learn":[242.3001204],"iteration":260,"passed_time":0.2628899972,"remaining_time":0.7443513713}, +{"learn":[241.1597182],"iteration":261,"passed_time":0.2637295343,"remaining_time":0.7428717416}, +{"learn":[240.6731094],"iteration":262,"passed_time":0.2649130496,"remaining_time":0.7423609032}, +{"learn":[240.2473054],"iteration":263,"passed_time":0.2656740087,"remaining_time":0.7406669333}, +{"learn":[239.1251422],"iteration":264,"passed_time":0.2664264829,"remaining_time":0.7389564714}, +{"learn":[237.9541634],"iteration":265,"passed_time":0.2676166004,"remaining_time":0.7384608448}, 
+{"learn":[236.9556174],"iteration":266,"passed_time":0.268605136,"remaining_time":0.7374066092}, +{"learn":[235.5577891],"iteration":267,"passed_time":0.2696458153,"remaining_time":0.7364952866}, +{"learn":[234.9502464],"iteration":268,"passed_time":0.2707437747,"remaining_time":0.735738659}, +{"learn":[234.0494811],"iteration":269,"passed_time":0.2722325346,"remaining_time":0.7360361121}, +{"learn":[233.1372456],"iteration":270,"passed_time":0.273657335,"remaining_time":0.7361483293}, +{"learn":[232.8750573],"iteration":271,"passed_time":0.2747687488,"remaining_time":0.7354104748}, +{"learn":[232.2241787],"iteration":272,"passed_time":0.2809607245,"remaining_time":0.7481994386}, +{"learn":[231.5673968],"iteration":273,"passed_time":0.2820284534,"remaining_time":0.7472724714}, +{"learn":[230.7525811],"iteration":274,"passed_time":0.2829181004,"remaining_time":0.7458749918}, +{"learn":[230.2074729],"iteration":275,"passed_time":0.2838759307,"remaining_time":0.7446600502}, +{"learn":[229.9528668],"iteration":276,"passed_time":0.284752897,"remaining_time":0.7432359009}, +{"learn":[229.7334938],"iteration":277,"passed_time":0.2856654939,"remaining_time":0.7419082251}, +{"learn":[228.6220546],"iteration":278,"passed_time":0.2893374723,"remaining_time":0.7477143997}, +{"learn":[228.4356848],"iteration":279,"passed_time":0.2908118078,"remaining_time":0.7478017916}, +{"learn":[227.5121516],"iteration":280,"passed_time":0.2918951437,"remaining_time":0.7468776097}, +{"learn":[226.5496263],"iteration":281,"passed_time":0.2940369876,"remaining_time":0.7486473655}, +{"learn":[225.5877827],"iteration":282,"passed_time":0.2952599927,"remaining_time":0.7480615363}, +{"learn":[224.7683111],"iteration":283,"passed_time":0.2962847004,"remaining_time":0.7469712869}, +{"learn":[224.0297309],"iteration":284,"passed_time":0.2974093753,"remaining_time":0.7461322924}, +{"learn":[223.3259847],"iteration":285,"passed_time":0.2984135527,"remaining_time":0.7449904778}, +{"learn":[222.6383996],"iteration":286,"passed_time":0.2993872237,"remaining_time":0.7437738345}, +{"learn":[221.9632532],"iteration":287,"passed_time":0.300281409,"remaining_time":0.7423623723}, +{"learn":[221.2404351],"iteration":288,"passed_time":0.3012657456,"remaining_time":0.7411762806}, +{"learn":[220.5276791],"iteration":289,"passed_time":0.3042906053,"remaining_time":0.7449873439}, +{"learn":[219.5688789],"iteration":290,"passed_time":0.307486542,"remaining_time":0.7491682414}, +{"learn":[218.5386855],"iteration":291,"passed_time":0.3084576263,"remaining_time":0.7479041077}, +{"learn":[217.8530368],"iteration":292,"passed_time":0.3097074442,"remaining_time":0.7473145497}, +{"learn":[216.6832815],"iteration":293,"passed_time":0.3118118155,"remaining_time":0.7487725908}, +{"learn":[216.2709057],"iteration":294,"passed_time":0.3126776625,"remaining_time":0.7472466172}, +{"learn":[215.438626],"iteration":295,"passed_time":0.3133737858,"remaining_time":0.7453214364}, +{"learn":[214.5969252],"iteration":296,"passed_time":0.3140354827,"remaining_time":0.743323045}, +{"learn":[214.4329592],"iteration":297,"passed_time":0.3147049837,"remaining_time":0.7413520086}, +{"learn":[213.7404229],"iteration":298,"passed_time":0.315388698,"remaining_time":0.739423001}, +{"learn":[213.2976718],"iteration":299,"passed_time":0.3161040187,"remaining_time":0.7375760435}, +{"learn":[213.0368616],"iteration":300,"passed_time":0.3167972953,"remaining_time":0.7356854134}, +{"learn":[212.8299923],"iteration":301,"passed_time":0.3174611057,"remaining_time":0.7337346085}, 
+{"learn":[212.2011472],"iteration":302,"passed_time":0.3181863804,"remaining_time":0.731933687}, +{"learn":[211.9407753],"iteration":303,"passed_time":0.3191434385,"remaining_time":0.7306705039}, +{"learn":[210.7295623],"iteration":304,"passed_time":0.3198900116,"remaining_time":0.7289296986}, +{"learn":[210.1997047],"iteration":305,"passed_time":0.3219810107,"remaining_time":0.7302445144}, +{"learn":[209.6083914],"iteration":306,"passed_time":0.3230898264,"remaining_time":0.7293200317}, +{"learn":[209.0600739],"iteration":307,"passed_time":0.3241701018,"remaining_time":0.7283302286}, +{"learn":[208.8628804],"iteration":308,"passed_time":0.3251752652,"remaining_time":0.7271718714}, +{"learn":[207.9809195],"iteration":309,"passed_time":0.3260997899,"remaining_time":0.7258350163}, +{"learn":[207.7482659],"iteration":310,"passed_time":0.327005614,"remaining_time":0.7244593827}, +{"learn":[207.6323486],"iteration":311,"passed_time":0.327890088,"remaining_time":0.7230396813}, +{"learn":[206.190741],"iteration":312,"passed_time":0.3311223909,"remaining_time":0.7267766217}, +{"learn":[205.6996807],"iteration":313,"passed_time":0.3447512654,"remaining_time":0.7531827008}, +{"learn":[205.4047318],"iteration":314,"passed_time":0.3458562999,"remaining_time":0.7521002077}, +{"learn":[205.2218503],"iteration":315,"passed_time":0.3467822586,"remaining_time":0.7506299521}, +{"learn":[204.241677],"iteration":316,"passed_time":0.347713184,"remaining_time":0.7491738318}, +{"learn":[203.9209904],"iteration":317,"passed_time":0.3488678205,"remaining_time":0.7482007975}, +{"learn":[203.7444036],"iteration":318,"passed_time":0.3496918347,"remaining_time":0.7465208132}, +{"learn":[202.7247697],"iteration":319,"passed_time":0.3504827725,"remaining_time":0.7447758916}, +{"learn":[202.1082808],"iteration":320,"passed_time":0.35223312,"remaining_time":0.7450663192}, +{"learn":[201.9474572],"iteration":321,"passed_time":0.3540688168,"remaining_time":0.7455237821}, +{"learn":[201.7857796],"iteration":322,"passed_time":0.3550448693,"remaining_time":0.7441652525}, +{"learn":[201.4609255],"iteration":323,"passed_time":0.3560183352,"remaining_time":0.7428036871}, +{"learn":[201.2312686],"iteration":324,"passed_time":0.3569252176,"remaining_time":0.7413062211}, +{"learn":[200.5681497],"iteration":325,"passed_time":0.357679359,"remaining_time":0.7394965888}, +{"learn":[200.4091687],"iteration":326,"passed_time":0.3583108038,"remaining_time":0.7374408897}, +{"learn":[199.5892878],"iteration":327,"passed_time":0.3589833849,"remaining_time":0.7354781545}, +{"learn":[199.4371002],"iteration":328,"passed_time":0.3596601337,"remaining_time":0.7335317621}, +{"learn":[198.3931701],"iteration":329,"passed_time":0.3603550598,"remaining_time":0.7316299699}, +{"learn":[197.9225317],"iteration":330,"passed_time":0.3610283304,"remaining_time":0.7296917009}, +{"learn":[197.3986823],"iteration":331,"passed_time":0.3617176106,"remaining_time":0.7277932648}, +{"learn":[197.0484135],"iteration":332,"passed_time":0.3624065477,"remaining_time":0.7259014033}, +{"learn":[195.7574594],"iteration":333,"passed_time":0.3630885776,"remaining_time":0.7240029722}, +{"learn":[195.6491527],"iteration":334,"passed_time":0.3637290681,"remaining_time":0.7220293441}, +{"learn":[195.4817842],"iteration":335,"passed_time":0.3643931234,"remaining_time":0.7201102201}, +{"learn":[194.7880145],"iteration":336,"passed_time":0.3656790473,"remaining_time":0.7194219832}, +{"learn":[194.4113957],"iteration":337,"passed_time":0.3672381382,"remaining_time":0.7192652293}, 
+{"learn":[194.0318356],"iteration":338,"passed_time":0.3683302396,"remaining_time":0.7181896412}, +{"learn":[193.5284278],"iteration":339,"passed_time":0.3694761199,"remaining_time":0.7172183504}, +{"learn":[192.5991816],"iteration":340,"passed_time":0.3705181374,"remaining_time":0.7160453154}, +{"learn":[192.1031854],"iteration":341,"passed_time":0.3713356174,"remaining_time":0.7144410416}, +{"learn":[191.5491337],"iteration":342,"passed_time":0.3721136723,"remaining_time":0.7127658387}, +{"learn":[191.1828464],"iteration":343,"passed_time":0.3728777941,"remaining_time":0.7110692818}, +{"learn":[189.8709412],"iteration":344,"passed_time":0.3737339332,"remaining_time":0.7095528296}, +{"learn":[189.2616361],"iteration":345,"passed_time":0.3745009739,"remaining_time":0.707871783}, +{"learn":[188.4713385],"iteration":346,"passed_time":0.3752887219,"remaining_time":0.7062349723}, +{"learn":[188.3002161],"iteration":347,"passed_time":0.3766492583,"remaining_time":0.7056761966}, +{"learn":[187.7644675],"iteration":348,"passed_time":0.3776280317,"remaining_time":0.7044007124}, +{"learn":[187.3059385],"iteration":349,"passed_time":0.3784313461,"remaining_time":0.7028010713}, +{"learn":[187.1764166],"iteration":350,"passed_time":0.3793119715,"remaining_time":0.701348916}, +{"learn":[186.7702198],"iteration":351,"passed_time":0.3801888043,"remaining_time":0.699893026}, +{"learn":[186.2233832],"iteration":352,"passed_time":0.3808687543,"remaining_time":0.6980795582}, +{"learn":[186.1274261],"iteration":353,"passed_time":0.3818199536,"remaining_time":0.696767486}, +{"learn":[185.636063],"iteration":354,"passed_time":0.3827129247,"remaining_time":0.6953516519}, +{"learn":[185.1812839],"iteration":355,"passed_time":0.3855361857,"remaining_time":0.697430628}, +{"learn":[185.068081],"iteration":356,"passed_time":0.3865215762,"remaining_time":0.6961719146}, +{"learn":[183.8599331],"iteration":357,"passed_time":0.3875010255,"remaining_time":0.6949040736}, +{"learn":[183.6914997],"iteration":358,"passed_time":0.3883361218,"remaining_time":0.6933800949}, +{"learn":[183.3436455],"iteration":359,"passed_time":0.3891532538,"remaining_time":0.6918280068}, +{"learn":[182.9215853],"iteration":360,"passed_time":0.3899197042,"remaining_time":0.6901902797}, +{"learn":[182.3247868],"iteration":361,"passed_time":0.3907271634,"remaining_time":0.6886296416}, +{"learn":[181.5896693],"iteration":362,"passed_time":0.3915431795,"remaining_time":0.687088169}, +{"learn":[180.8062462],"iteration":363,"passed_time":0.3926395389,"remaining_time":0.6860405131}, +{"learn":[180.1039249],"iteration":364,"passed_time":0.3936535825,"remaining_time":0.6848493833}, +{"learn":[179.9705744],"iteration":365,"passed_time":0.3947294249,"remaining_time":0.6837662715}, +{"learn":[179.8880634],"iteration":366,"passed_time":0.3957195952,"remaining_time":0.6825354326}, +{"learn":[179.5510833],"iteration":367,"passed_time":0.3982340713,"remaining_time":0.6839237312}, +{"learn":[178.6255684],"iteration":368,"passed_time":0.3993732693,"remaining_time":0.6829391136}, +{"learn":[178.0825683],"iteration":369,"passed_time":0.4007687134,"remaining_time":0.6823899715}, +{"learn":[177.5440763],"iteration":370,"passed_time":0.401925406,"remaining_time":0.6814314835}, +{"learn":[177.0171958],"iteration":371,"passed_time":0.402820422,"remaining_time":0.6800301748}, +{"learn":[176.7896567],"iteration":372,"passed_time":0.4037089473,"remaining_time":0.6786206701}, +{"learn":[176.149964],"iteration":373,"passed_time":0.404497144,"remaining_time":0.6770460217}, 
+{"learn":[175.3705414],"iteration":374,"passed_time":0.4053048001,"remaining_time":0.6755080002}, +{"learn":[175.0059888],"iteration":375,"passed_time":0.4060908859,"remaining_time":0.6739380659}, +{"learn":[174.5529551],"iteration":376,"passed_time":0.4068954288,"remaining_time":0.6724027908}, +{"learn":[174.4630831],"iteration":377,"passed_time":0.4077297905,"remaining_time":0.6709204489}, +{"learn":[174.2530752],"iteration":378,"passed_time":0.4085539959,"remaining_time":0.6694248851}, +{"learn":[174.1312762],"iteration":379,"passed_time":0.4093783982,"remaining_time":0.667933176}, +{"learn":[173.0735198],"iteration":380,"passed_time":0.4101689514,"remaining_time":0.6663899762}, +{"learn":[172.9912893],"iteration":381,"passed_time":0.4110368887,"remaining_time":0.664975909}, +{"learn":[172.640672],"iteration":382,"passed_time":0.4120962987,"remaining_time":0.6638731496}, +{"learn":[172.5681628],"iteration":383,"passed_time":0.4130831674,"remaining_time":0.6626542477}, +{"learn":[172.4643172],"iteration":384,"passed_time":0.4150996535,"remaining_time":0.6630812647}, +{"learn":[171.7777603],"iteration":385,"passed_time":0.4163728924,"remaining_time":0.6623133573}, +{"learn":[171.6906621],"iteration":386,"passed_time":0.4173637247,"remaining_time":0.6610955123}, +{"learn":[170.6872263],"iteration":387,"passed_time":0.4184141477,"remaining_time":0.6599728309}, +{"learn":[170.514087],"iteration":388,"passed_time":0.4194147386,"remaining_time":0.6587722501}, +{"learn":[170.3233994],"iteration":389,"passed_time":0.4202317852,"remaining_time":0.6572856127}, +{"learn":[169.8846439],"iteration":390,"passed_time":0.4210836551,"remaining_time":0.6558566392}, +{"learn":[169.529393],"iteration":391,"passed_time":0.4218376313,"remaining_time":0.654278775}, +{"learn":[168.9009519],"iteration":392,"passed_time":0.4226273698,"remaining_time":0.6527603396}, +{"learn":[168.2725977],"iteration":393,"passed_time":0.423757208,"remaining_time":0.6517687006}, +{"learn":[168.1686093],"iteration":394,"passed_time":0.4246981313,"remaining_time":0.6504870112}, +{"learn":[168.0934607],"iteration":395,"passed_time":0.4256138378,"remaining_time":0.6491685809}, +{"learn":[167.508123],"iteration":396,"passed_time":0.4263570627,"remaining_time":0.6475901984}, +{"learn":[167.4163222],"iteration":397,"passed_time":0.4275014107,"remaining_time":0.6466227368}, +{"learn":[166.9013374],"iteration":398,"passed_time":0.4283880469,"remaining_time":0.645266206}, +{"learn":[166.6137216],"iteration":399,"passed_time":0.4292311225,"remaining_time":0.6438466838}, +{"learn":[165.7003187],"iteration":400,"passed_time":0.4302606901,"remaining_time":0.6427086119}, +{"learn":[165.5639131],"iteration":401,"passed_time":0.43119758,"remaining_time":0.641433216}, +{"learn":[164.9306567],"iteration":402,"passed_time":0.4319544195,"remaining_time":0.6398927753}, +{"learn":[164.8399144],"iteration":403,"passed_time":0.4328111105,"remaining_time":0.6385035194}, +{"learn":[164.0876912],"iteration":404,"passed_time":0.4335248597,"remaining_time":0.6369068927}, +{"learn":[163.5523537],"iteration":405,"passed_time":0.4342113718,"remaining_time":0.6352747656}, +{"learn":[163.2875856],"iteration":406,"passed_time":0.4349497346,"remaining_time":0.633722832}, +{"learn":[162.998686],"iteration":407,"passed_time":0.4356655826,"remaining_time":0.6321422179}, +{"learn":[162.5089806],"iteration":408,"passed_time":0.4363218624,"remaining_time":0.6304797572}, +{"learn":[162.2376064],"iteration":409,"passed_time":0.4369614566,"remaining_time":0.6287981937}, 
+{"learn":[161.6910172],"iteration":410,"passed_time":0.4376030941,"remaining_time":0.6271246287}, +{"learn":[161.5034709],"iteration":411,"passed_time":0.4382657137,"remaining_time":0.6254860185}, +{"learn":[161.3250696],"iteration":412,"passed_time":0.4389702915,"remaining_time":0.6239117702}, +{"learn":[160.6313335],"iteration":413,"passed_time":0.4396917301,"remaining_time":0.622365589}, +{"learn":[160.3742082],"iteration":414,"passed_time":0.4403315544,"remaining_time":0.6207083357}, +{"learn":[159.7341574],"iteration":415,"passed_time":0.4410293145,"remaining_time":0.619137307}, +{"learn":[159.664954],"iteration":416,"passed_time":0.4416636109,"remaining_time":0.6174817389}, +{"learn":[159.5420211],"iteration":417,"passed_time":0.443169979,"remaining_time":0.6170452818}, +{"learn":[159.4801209],"iteration":418,"passed_time":0.4439697464,"remaining_time":0.6156239205}, +{"learn":[159.3536466],"iteration":419,"passed_time":0.4468687852,"remaining_time":0.6171045128}, +{"learn":[159.2770295],"iteration":420,"passed_time":0.4477798932,"remaining_time":0.6158303044}, +{"learn":[158.561971],"iteration":421,"passed_time":0.4486551086,"remaining_time":0.6145086558}, +{"learn":[157.9686295],"iteration":422,"passed_time":0.4493666775,"remaining_time":0.6129658934}, +{"learn":[157.8587884],"iteration":423,"passed_time":0.4501461375,"remaining_time":0.6115192812}, +{"learn":[157.3191629],"iteration":424,"passed_time":0.4509778631,"remaining_time":0.6101465207}, +{"learn":[156.8336449],"iteration":425,"passed_time":0.4517793885,"remaining_time":0.608735608}, +{"learn":[156.4918462],"iteration":426,"passed_time":0.452599052,"remaining_time":0.6073518895}, +{"learn":[156.145352],"iteration":427,"passed_time":0.4537216324,"remaining_time":0.6063756396}, +{"learn":[156.0697038],"iteration":428,"passed_time":0.4546796832,"remaining_time":0.6051797182}, +{"learn":[155.9134811],"iteration":429,"passed_time":0.4555826276,"remaining_time":0.6039118551}, +{"learn":[155.678063],"iteration":430,"passed_time":0.4563601074,"remaining_time":0.602480049}, +{"learn":[155.4965466],"iteration":431,"passed_time":0.4570877359,"remaining_time":0.6009857268}, +{"learn":[155.0397607],"iteration":432,"passed_time":0.458441846,"remaining_time":0.6003153041}, +{"learn":[154.6821414],"iteration":433,"passed_time":0.4592434515,"remaining_time":0.5989211833}, +{"learn":[154.6232933],"iteration":434,"passed_time":0.4600737603,"remaining_time":0.5975670679}, +{"learn":[154.1938736],"iteration":435,"passed_time":0.4623943519,"remaining_time":0.5981431525}, +{"learn":[153.8281113],"iteration":436,"passed_time":0.4634632502,"remaining_time":0.5970933864}, +{"learn":[153.3348882],"iteration":437,"passed_time":0.464448738,"remaining_time":0.5959365086}, +{"learn":[153.2388265],"iteration":438,"passed_time":0.465294778,"remaining_time":0.5946022106}, +{"learn":[153.1804252],"iteration":439,"passed_time":0.4686570389,"remaining_time":0.596472595}, +{"learn":[153.0976746],"iteration":440,"passed_time":0.4696792394,"remaining_time":0.5953530495}, +{"learn":[152.7513308],"iteration":441,"passed_time":0.470383463,"remaining_time":0.5938325166}, +{"learn":[152.3535668],"iteration":442,"passed_time":0.4710394725,"remaining_time":0.5922550478}, +{"learn":[152.110953],"iteration":443,"passed_time":0.4717235831,"remaining_time":0.5907169193}, +{"learn":[151.8156996],"iteration":444,"passed_time":0.4724359026,"remaining_time":0.5892178111}, +{"learn":[151.2831641],"iteration":445,"passed_time":0.4731152213,"remaining_time":0.5876812391}, 
+{"learn":[150.8180434],"iteration":446,"passed_time":0.4762209081,"remaining_time":0.589150251}, +{"learn":[150.7537961],"iteration":447,"passed_time":0.4782438064,"remaining_time":0.58926469}, +{"learn":[150.2536504],"iteration":448,"passed_time":0.4791158351,"remaining_time":0.5879572943}, +{"learn":[149.8562556],"iteration":449,"passed_time":0.4799703899,"remaining_time":0.5866304766}, +{"learn":[149.4497248],"iteration":450,"passed_time":0.4807549488,"remaining_time":0.5852205474}, +{"learn":[149.2791359],"iteration":451,"passed_time":0.4815086729,"remaining_time":0.5837760016}, +{"learn":[148.8058612],"iteration":452,"passed_time":0.4823637351,"remaining_time":0.5824568722}, +{"learn":[148.419447],"iteration":453,"passed_time":0.4831207633,"remaining_time":0.5810218872}, +{"learn":[148.0003908],"iteration":454,"passed_time":0.4838987128,"remaining_time":0.5796149417}, +{"learn":[147.1557482],"iteration":455,"passed_time":0.4847178899,"remaining_time":0.5782599388}, +{"learn":[146.7813221],"iteration":456,"passed_time":0.4855422462,"remaining_time":0.5769134348}, +{"learn":[146.63709],"iteration":457,"passed_time":0.4863422034,"remaining_time":0.5755403368}, +{"learn":[145.8417458],"iteration":458,"passed_time":0.4872297879,"remaining_time":0.574273018}, +{"learn":[145.7921029],"iteration":459,"passed_time":0.4880436482,"remaining_time":0.5729208044}, +{"learn":[145.69582],"iteration":460,"passed_time":0.4897921187,"remaining_time":0.5726636703}, +{"learn":[145.6319971],"iteration":461,"passed_time":0.4908785476,"remaining_time":0.5716291312}, +{"learn":[145.5926065],"iteration":462,"passed_time":0.4926839153,"remaining_time":0.5714282128}, +{"learn":[145.2514255],"iteration":463,"passed_time":0.4934604526,"remaining_time":0.5700319021}, +{"learn":[145.0422767],"iteration":464,"passed_time":0.4942343327,"remaining_time":0.5686352}, +{"learn":[144.6941249],"iteration":465,"passed_time":0.4950155632,"remaining_time":0.5672495939}, +{"learn":[144.1409204],"iteration":466,"passed_time":0.4980094372,"remaining_time":0.5683919272}, +{"learn":[144.0384521],"iteration":467,"passed_time":0.4995867763,"remaining_time":0.5679063354}, +{"learn":[143.9539897],"iteration":468,"passed_time":0.5004117041,"remaining_time":0.5665642109}, +{"learn":[143.7129095],"iteration":469,"passed_time":0.5012103338,"remaining_time":0.5651946318}, +{"learn":[143.6364716],"iteration":470,"passed_time":0.5031409272,"remaining_time":0.5650988333}, +{"learn":[143.5557396],"iteration":471,"passed_time":0.5039361277,"remaining_time":0.5637251598}, +{"learn":[142.9670765],"iteration":472,"passed_time":0.5047600493,"remaining_time":0.5623859323}, +{"learn":[142.5645332],"iteration":473,"passed_time":0.505852198,"remaining_time":0.561346532}, +{"learn":[141.787664],"iteration":474,"passed_time":0.5081067339,"remaining_time":0.5615916532}, +{"learn":[141.3298725],"iteration":475,"passed_time":0.5092348561,"remaining_time":0.5605862701}, +{"learn":[141.2575251],"iteration":476,"passed_time":0.5120181971,"remaining_time":0.561395214}, +{"learn":[141.1857064],"iteration":477,"passed_time":0.5129923975,"remaining_time":0.5602134551}, +{"learn":[140.7815123],"iteration":478,"passed_time":0.5143081769,"remaining_time":0.5594040922}, +{"learn":[140.6905512],"iteration":479,"passed_time":0.5151851563,"remaining_time":0.5581172527}, +{"learn":[140.6240735],"iteration":480,"passed_time":0.5161367872,"remaining_time":0.5569126665}, +{"learn":[140.1605819],"iteration":481,"passed_time":0.516989741,"remaining_time":0.5556030827}, 
+{"learn":[139.9923119],"iteration":482,"passed_time":0.5178670335,"remaining_time":0.5543214417}, +{"learn":[139.5946379],"iteration":483,"passed_time":0.5187664091,"remaining_time":0.5530650147}, +{"learn":[139.5477815],"iteration":484,"passed_time":0.5196287067,"remaining_time":0.5517706886}, +{"learn":[139.0551136],"iteration":485,"passed_time":0.5214534517,"remaining_time":0.5514960374}, +{"learn":[139.0083275],"iteration":486,"passed_time":0.5226149526,"remaining_time":0.5505163669}, +{"learn":[138.8115081],"iteration":487,"passed_time":0.5238705758,"remaining_time":0.5496347024}, +{"learn":[138.3582187],"iteration":488,"passed_time":0.5251143661,"remaining_time":0.5487391433}, +{"learn":[137.9994483],"iteration":489,"passed_time":0.5269865731,"remaining_time":0.5484962291}, +{"learn":[137.2762793],"iteration":490,"passed_time":0.5280184898,"remaining_time":0.5473755831}, +{"learn":[137.2319494],"iteration":491,"passed_time":0.5289387408,"remaining_time":0.5461400006}, +{"learn":[136.8284401],"iteration":492,"passed_time":0.5308806962,"remaining_time":0.5459564158}, +{"learn":[136.0778994],"iteration":493,"passed_time":0.5318657592,"remaining_time":0.5447855752}, +{"learn":[135.7573861],"iteration":494,"passed_time":0.5327836058,"remaining_time":0.543546911}, +{"learn":[135.2420576],"iteration":495,"passed_time":0.5337209173,"remaining_time":0.5423293192}, +{"learn":[134.5639228],"iteration":496,"passed_time":0.5346187718,"remaining_time":0.5410729219}, +{"learn":[134.1998245],"iteration":497,"passed_time":0.5354874533,"remaining_time":0.5397885573}, +{"learn":[133.9053919],"iteration":498,"passed_time":0.5364598188,"remaining_time":0.5386099584}, +{"learn":[133.6473876],"iteration":499,"passed_time":0.5375266255,"remaining_time":0.5375266255}, +{"learn":[133.2527423],"iteration":500,"passed_time":0.5390280141,"remaining_time":0.5368762056}, +{"learn":[133.1154401],"iteration":501,"passed_time":0.5402843284,"remaining_time":0.535979274}, +{"learn":[132.7053765],"iteration":502,"passed_time":0.5413307009,"remaining_time":0.5348734759}, +{"learn":[132.6254567],"iteration":503,"passed_time":0.5430196986,"remaining_time":0.5344003383}, +{"learn":[132.42907],"iteration":504,"passed_time":0.5450514318,"remaining_time":0.5342583341}, +{"learn":[132.0886631],"iteration":505,"passed_time":0.546295965,"remaining_time":0.5333403294}, +{"learn":[132.0251257],"iteration":506,"passed_time":0.5473857396,"remaining_time":0.5322705515}, +{"learn":[131.8522651],"iteration":507,"passed_time":0.5485178867,"remaining_time":0.5312417328}, +{"learn":[131.7043809],"iteration":508,"passed_time":0.549607557,"remaining_time":0.5301715334}, +{"learn":[131.6551685],"iteration":509,"passed_time":0.5505935195,"remaining_time":0.5290016167}, +{"learn":[131.3064159],"iteration":510,"passed_time":0.5524212168,"remaining_time":0.5286379159}, +{"learn":[131.2539164],"iteration":511,"passed_time":0.5540426827,"remaining_time":0.5280719319}, +{"learn":[130.802881],"iteration":512,"passed_time":0.5580595423,"remaining_time":0.5297758228}, +{"learn":[130.7267198],"iteration":513,"passed_time":0.5592597941,"remaining_time":0.5287942801}, +{"learn":[130.6751541],"iteration":514,"passed_time":0.5601386559,"remaining_time":0.5275092196}, +{"learn":[130.3496136],"iteration":515,"passed_time":0.5611183732,"remaining_time":0.5263203346}, +{"learn":[129.8251839],"iteration":516,"passed_time":0.5618921686,"remaining_time":0.524939879}, 
+{"learn":[129.7822498],"iteration":517,"passed_time":0.5626615506,"remaining_time":0.5235576591}, +{"learn":[129.5051843],"iteration":518,"passed_time":0.5634500703,"remaining_time":0.5221955372}, +{"learn":[129.3134521],"iteration":519,"passed_time":0.5642359242,"remaining_time":0.5208331608}, +{"learn":[128.9161601],"iteration":520,"passed_time":0.5650795567,"remaining_time":0.5195261183}, +{"learn":[128.4843352],"iteration":521,"passed_time":0.5658644517,"remaining_time":0.518167065}, +{"learn":[128.3649809],"iteration":522,"passed_time":0.5673728311,"remaining_time":0.5174700581}, +{"learn":[127.7238357],"iteration":523,"passed_time":0.5684390768,"remaining_time":0.5163683217}, +{"learn":[127.6690506],"iteration":524,"passed_time":0.5714271487,"remaining_time":0.5170055155}, +{"learn":[127.3190322],"iteration":525,"passed_time":0.5726041875,"remaining_time":0.5159969294}, +{"learn":[126.7021578],"iteration":526,"passed_time":0.5737045946,"remaining_time":0.5149189245}, +{"learn":[126.32958],"iteration":527,"passed_time":0.5747645162,"remaining_time":0.5138046433}, +{"learn":[125.9108245],"iteration":528,"passed_time":0.5757300609,"remaining_time":0.5126065382}, +{"learn":[125.332027],"iteration":529,"passed_time":0.5767271586,"remaining_time":0.5114372916}, +{"learn":[125.1915011],"iteration":530,"passed_time":0.5775934006,"remaining_time":0.5101531165}, +{"learn":[124.5991005],"iteration":531,"passed_time":0.5784321094,"remaining_time":0.5088462917}, +{"learn":[124.277708],"iteration":532,"passed_time":0.5792126591,"remaining_time":0.507490266}, +{"learn":[124.1804067],"iteration":533,"passed_time":0.5800212439,"remaining_time":0.5061608608}, +{"learn":[124.0032042],"iteration":534,"passed_time":0.5807150814,"remaining_time":0.5047336689}, +{"learn":[123.91302],"iteration":535,"passed_time":0.5816192279,"remaining_time":0.5034912719}, +{"learn":[123.6086038],"iteration":536,"passed_time":0.5827263965,"remaining_time":0.5024251798}, +{"learn":[123.0597351],"iteration":537,"passed_time":0.5836574063,"remaining_time":0.5012076611}, +{"learn":[122.7188736],"iteration":538,"passed_time":0.5858546582,"remaining_time":0.5010742067}, +{"learn":[122.5133584],"iteration":539,"passed_time":0.5868662262,"remaining_time":0.4999230816}, +{"learn":[122.1510768],"iteration":540,"passed_time":0.5878454522,"remaining_time":0.4987450325}, +{"learn":[122.1148408],"iteration":541,"passed_time":0.5886519479,"remaining_time":0.4974217567}, +{"learn":[121.7833691],"iteration":542,"passed_time":0.5893892801,"remaining_time":0.496042175}, +{"learn":[121.7468344],"iteration":543,"passed_time":0.5901614931,"remaining_time":0.4946941927}, +{"learn":[121.7049673],"iteration":544,"passed_time":0.5908561588,"remaining_time":0.4932835821}, +{"learn":[121.4597814],"iteration":545,"passed_time":0.5915496919,"remaining_time":0.4918746522}, +{"learn":[121.2510597],"iteration":546,"passed_time":0.5922973806,"remaining_time":0.4905131872}, +{"learn":[121.1959397],"iteration":547,"passed_time":0.5929502544,"remaining_time":0.4890757573}, +{"learn":[121.0473791],"iteration":548,"passed_time":0.593619889,"remaining_time":0.4876549543}, +{"learn":[120.7324814],"iteration":549,"passed_time":0.5942963066,"remaining_time":0.4862424327}, +{"learn":[120.4692276],"iteration":550,"passed_time":0.5950399118,"remaining_time":0.4848873328}, +{"learn":[120.3943509],"iteration":551,"passed_time":0.5956651125,"remaining_time":0.4834383522}, +{"learn":[120.3597888],"iteration":552,"passed_time":0.5963589422,"remaining_time":0.4820478249}, 
+{"learn":[120.0759583],"iteration":553,"passed_time":0.5985952588,"remaining_time":0.4819015982}, +{"learn":[119.9362893],"iteration":554,"passed_time":0.5997194925,"remaining_time":0.4808561696}, +{"learn":[119.6710969],"iteration":555,"passed_time":0.6009058383,"remaining_time":0.4798600579}, +{"learn":[119.3271803],"iteration":556,"passed_time":0.6021408013,"remaining_time":0.4789019299}, +{"learn":[119.0593249],"iteration":557,"passed_time":0.6032022505,"remaining_time":0.4778053669}, +{"learn":[118.9329291],"iteration":558,"passed_time":0.6042225759,"remaining_time":0.4766764865}, +{"learn":[118.418452],"iteration":559,"passed_time":0.6051830071,"remaining_time":0.4755009342}, +{"learn":[118.1574761],"iteration":560,"passed_time":0.6059945959,"remaining_time":0.4742096749}, +{"learn":[117.7196836],"iteration":561,"passed_time":0.6067419136,"remaining_time":0.4728700323}, +{"learn":[117.3428279],"iteration":562,"passed_time":0.6075814301,"remaining_time":0.4716040585}, +{"learn":[117.186318],"iteration":563,"passed_time":0.6084530098,"remaining_time":0.4703643835}, +{"learn":[117.1112686],"iteration":564,"passed_time":0.6091879723,"remaining_time":0.4690208283}, +{"learn":[116.8819815],"iteration":565,"passed_time":0.6102514146,"remaining_time":0.4679312967}, +{"learn":[116.6706405],"iteration":566,"passed_time":0.6112153772,"remaining_time":0.4667658877}, +{"learn":[116.2121918],"iteration":567,"passed_time":0.6119737308,"remaining_time":0.4654448094}, +{"learn":[116.0962801],"iteration":568,"passed_time":0.6141752362,"remaining_time":0.465218852}, +{"learn":[116.052204],"iteration":569,"passed_time":0.6158600039,"remaining_time":0.4645961433}, +{"learn":[115.8082428],"iteration":570,"passed_time":0.6170066132,"remaining_time":0.4635653889}, +{"learn":[115.7286278],"iteration":571,"passed_time":0.6180947883,"remaining_time":0.4624905059}, +{"learn":[115.5696183],"iteration":572,"passed_time":0.6190866753,"remaining_time":0.4613438226}, +{"learn":[115.3878373],"iteration":573,"passed_time":0.6199408751,"remaining_time":0.4600954927}, +{"learn":[115.1568175],"iteration":574,"passed_time":0.6206712459,"remaining_time":0.4587570078}, +{"learn":[114.9132003],"iteration":575,"passed_time":0.6214002987,"remaining_time":0.4574196643}, +{"learn":[114.864146],"iteration":576,"passed_time":0.622133411,"remaining_time":0.4560874053}, +{"learn":[114.7956804],"iteration":577,"passed_time":0.6228816916,"remaining_time":0.4547682938}, +{"learn":[114.4332725],"iteration":578,"passed_time":0.623679947,"remaining_time":0.4534874917}, +{"learn":[114.2023529],"iteration":579,"passed_time":0.6244521428,"remaining_time":0.4521894827}, +{"learn":[114.1078602],"iteration":580,"passed_time":0.6255751334,"remaining_time":0.4511462666}, +{"learn":[113.7429863],"iteration":581,"passed_time":0.6263876819,"remaining_time":0.4498798128}, +{"learn":[113.4915904],"iteration":582,"passed_time":0.6271613619,"remaining_time":0.4485871148}, +{"learn":[113.3195407],"iteration":583,"passed_time":0.6286418431,"remaining_time":0.4477996691}, +{"learn":[112.84174],"iteration":584,"passed_time":0.629708252,"remaining_time":0.4467161104}, +{"learn":[112.4972889],"iteration":585,"passed_time":0.6307265409,"remaining_time":0.4455986142}, +{"learn":[112.4118409],"iteration":586,"passed_time":0.6318672394,"remaining_time":0.4445675807}, +{"learn":[112.3541925],"iteration":587,"passed_time":0.6328667182,"remaining_time":0.4434372243}, +{"learn":[112.1993478],"iteration":588,"passed_time":0.6337699261,"remaining_time":0.4422401352}, 
+{"learn":[112.0858921],"iteration":589,"passed_time":0.6346600092,"remaining_time":0.4410349217}, +{"learn":[112.0542527],"iteration":590,"passed_time":0.6354657696,"remaining_time":0.4397724192}, +{"learn":[112.0106512],"iteration":591,"passed_time":0.6362922707,"remaining_time":0.4385257541}, +{"learn":[111.7676748],"iteration":592,"passed_time":0.6370746688,"remaining_time":0.4372502364}, +{"learn":[111.6282142],"iteration":593,"passed_time":0.6378353227,"remaining_time":0.4359615169}, +{"learn":[111.5652245],"iteration":594,"passed_time":0.6385584901,"remaining_time":0.4346490563}, +{"learn":[111.3232837],"iteration":595,"passed_time":0.6393004291,"remaining_time":0.4333512976}, +{"learn":[111.1573668],"iteration":596,"passed_time":0.6400652212,"remaining_time":0.4320708277}, +{"learn":[110.9416805],"iteration":597,"passed_time":0.640927753,"remaining_time":0.4308577872}, +{"learn":[110.8623598],"iteration":598,"passed_time":0.6417882435,"remaining_time":0.4296445503}, +{"learn":[110.5618879],"iteration":599,"passed_time":0.6425433869,"remaining_time":0.428362258}, +{"learn":[110.4440189],"iteration":600,"passed_time":0.6442564345,"remaining_time":0.4277176662}, +{"learn":[110.3327871],"iteration":601,"passed_time":0.6452137154,"remaining_time":0.426569865}, +{"learn":[110.1214915],"iteration":602,"passed_time":0.6481289498,"remaining_time":0.426711763}, +{"learn":[109.9496065],"iteration":603,"passed_time":0.6491581966,"remaining_time":0.4256070295}, +{"learn":[109.6583282],"iteration":604,"passed_time":0.6502028502,"remaining_time":0.4245126047}, +{"learn":[109.6044222],"iteration":605,"passed_time":0.6510298427,"remaining_time":0.4232768284}, +{"learn":[109.4820211],"iteration":606,"passed_time":0.6519679657,"remaining_time":0.4221143501}, +{"learn":[109.2698381],"iteration":607,"passed_time":0.6536855791,"remaining_time":0.421455176}, +{"learn":[109.1570669],"iteration":608,"passed_time":0.6546287507,"remaining_time":0.4202953063}, +{"learn":[109.1252474],"iteration":609,"passed_time":0.6554912516,"remaining_time":0.4190845707}, +{"learn":[108.9188958],"iteration":610,"passed_time":0.6562546392,"remaining_time":0.4178118734}, +{"learn":[108.7261926],"iteration":611,"passed_time":0.6576125923,"remaining_time":0.4169177872}, +{"learn":[108.5619027],"iteration":612,"passed_time":0.6593942801,"remaining_time":0.4162897005}, +{"learn":[108.5113332],"iteration":613,"passed_time":0.6602945018,"remaining_time":0.4151037096}, +{"learn":[108.4806216],"iteration":614,"passed_time":0.6613355287,"remaining_time":0.4140067944}, +{"learn":[108.3221197],"iteration":615,"passed_time":0.6630579806,"remaining_time":0.413334845}, +{"learn":[108.0966532],"iteration":616,"passed_time":0.6642254003,"remaining_time":0.4123149568}, +{"learn":[107.8732155],"iteration":617,"passed_time":0.6651944867,"remaining_time":0.4111719966}, +{"learn":[107.8368224],"iteration":618,"passed_time":0.6661608001,"remaining_time":0.4100278915}, +{"learn":[107.7420721],"iteration":619,"passed_time":0.6671039551,"remaining_time":0.408870166}, +{"learn":[107.5455535],"iteration":620,"passed_time":0.6679729656,"remaining_time":0.4076678808}, +{"learn":[107.3268846],"iteration":621,"passed_time":0.6687975769,"remaining_time":0.406439685}, +{"learn":[107.2784579],"iteration":622,"passed_time":0.6695651831,"remaining_time":0.405178289}, +{"learn":[107.2004171],"iteration":623,"passed_time":0.6703633926,"remaining_time":0.4039369161}, +{"learn":[106.9064835],"iteration":624,"passed_time":0.6714924225,"remaining_time":0.4028954535}, 
+{"learn":[106.4321729],"iteration":625,"passed_time":0.6723565746,"remaining_time":0.4016954615}, +{"learn":[106.3370654],"iteration":626,"passed_time":0.6732694812,"remaining_time":0.4005255446}, +{"learn":[106.0784342],"iteration":627,"passed_time":0.6740400974,"remaining_time":0.3992721596}, +{"learn":[106.0356971],"iteration":628,"passed_time":0.6758585521,"remaining_time":0.3986383511}, +{"learn":[105.793307],"iteration":629,"passed_time":0.6778240753,"remaining_time":0.3980871553}, +{"learn":[105.772673],"iteration":630,"passed_time":0.6790221642,"remaining_time":0.3970826919}, +{"learn":[105.5621032],"iteration":631,"passed_time":0.6801488691,"remaining_time":0.3960360504}, +{"learn":[105.5327179],"iteration":632,"passed_time":0.6810385328,"remaining_time":0.3948517244}, +{"learn":[105.4297935],"iteration":633,"passed_time":0.6818223094,"remaining_time":0.3936072007}, +{"learn":[105.349791],"iteration":634,"passed_time":0.6825602103,"remaining_time":0.3923377587}, +{"learn":[105.3207121],"iteration":635,"passed_time":0.6834254228,"remaining_time":0.3911428521}, +{"learn":[105.0587917],"iteration":636,"passed_time":0.6843279922,"remaining_time":0.3899702687}, +{"learn":[104.9291032],"iteration":637,"passed_time":0.6851580282,"remaining_time":0.3887573765}, +{"learn":[104.7230249],"iteration":638,"passed_time":0.6859577361,"remaining_time":0.3875285489}, +{"learn":[104.5480841],"iteration":639,"passed_time":0.6871994127,"remaining_time":0.3865496697}, +{"learn":[104.3438205],"iteration":640,"passed_time":0.6880311444,"remaining_time":0.3853403757}, +{"learn":[104.3146935],"iteration":641,"passed_time":0.6888299482,"remaining_time":0.3841138964}, +{"learn":[104.0057678],"iteration":642,"passed_time":0.6897539748,"remaining_time":0.3829582722}, +{"learn":[103.9290039],"iteration":643,"passed_time":0.6908476479,"remaining_time":0.381897147}, +{"learn":[103.6376607],"iteration":644,"passed_time":0.6919440469,"remaining_time":0.3808374212}, +{"learn":[103.3721232],"iteration":645,"passed_time":0.6949435004,"remaining_time":0.3808204321}, +{"learn":[103.1844589],"iteration":646,"passed_time":0.695887756,"remaining_time":0.3796729179}, +{"learn":[103.0468138],"iteration":647,"passed_time":0.6969041231,"remaining_time":0.3785652026}, +{"learn":[102.9096825],"iteration":648,"passed_time":0.6976508639,"remaining_time":0.3773119464}, +{"learn":[102.8911943],"iteration":649,"passed_time":0.6983372884,"remaining_time":0.3760277707}, +{"learn":[102.7599087],"iteration":650,"passed_time":0.6990989237,"remaining_time":0.3747857517}, +{"learn":[102.5024037],"iteration":651,"passed_time":0.6998686821,"remaining_time":0.373549542}, +{"learn":[102.1011329],"iteration":652,"passed_time":0.7005717066,"remaining_time":0.3722792989}, +{"learn":[101.8532865],"iteration":653,"passed_time":0.7014163472,"remaining_time":0.3710857128}, +{"learn":[101.7601504],"iteration":654,"passed_time":0.7022962452,"remaining_time":0.3699117627}, +{"learn":[101.6918072],"iteration":655,"passed_time":0.7031633037,"remaining_time":0.3687319763}, +{"learn":[101.5773558],"iteration":656,"passed_time":0.7039965006,"remaining_time":0.3675354638}, +{"learn":[101.4480806],"iteration":657,"passed_time":0.7049205318,"remaining_time":0.3663872673}, +{"learn":[101.3912872],"iteration":658,"passed_time":0.7060136153,"remaining_time":0.3653272273}, +{"learn":[101.1216664],"iteration":659,"passed_time":0.7069125889,"remaining_time":0.3641670912}, 
+{"learn":[100.9894583],"iteration":660,"passed_time":0.7080264796,"remaining_time":0.3631179676}, +{"learn":[100.7710811],"iteration":661,"passed_time":0.7099158704,"remaining_time":0.3624645985}, +{"learn":[100.3719608],"iteration":662,"passed_time":0.7111093045,"remaining_time":0.361453749}, +{"learn":[100.1535745],"iteration":663,"passed_time":0.712169866,"remaining_time":0.3603751129}, +{"learn":[99.90273285],"iteration":664,"passed_time":0.7131535413,"remaining_time":0.359257799}, +{"learn":[99.75122622],"iteration":665,"passed_time":0.7139613748,"remaining_time":0.3580527015}, +{"learn":[99.59773921],"iteration":666,"passed_time":0.7148129673,"remaining_time":0.3568706418}, +{"learn":[99.45590455],"iteration":667,"passed_time":0.7157007006,"remaining_time":0.3557075338}, +{"learn":[99.35142904],"iteration":668,"passed_time":0.7164607453,"remaining_time":0.3544820728}, +{"learn":[98.97802968],"iteration":669,"passed_time":0.7171754321,"remaining_time":0.3532356606}, +{"learn":[98.63061279],"iteration":670,"passed_time":0.7179290501,"remaining_time":0.3520099217}, +{"learn":[98.33636214],"iteration":671,"passed_time":0.7187698674,"remaining_time":0.3508281495}, +{"learn":[98.09710463],"iteration":672,"passed_time":0.7195751424,"remaining_time":0.3496301212}, +{"learn":[97.83469051],"iteration":673,"passed_time":0.7206026311,"remaining_time":0.3485407385}, +{"learn":[97.65662438],"iteration":674,"passed_time":0.7215916374,"remaining_time":0.3474330106}, +{"learn":[97.41941931],"iteration":675,"passed_time":0.7227597273,"remaining_time":0.3464114669}, +{"learn":[97.16789393],"iteration":676,"passed_time":0.7262212835,"remaining_time":0.3464837143}, +{"learn":[96.79614151],"iteration":677,"passed_time":0.7273843066,"remaining_time":0.3454539037}, +{"learn":[96.63508251],"iteration":678,"passed_time":0.7283786115,"remaining_time":0.3443439386}, +{"learn":[96.48027557],"iteration":679,"passed_time":0.729200784,"remaining_time":0.3431533101}, +{"learn":[96.23638366],"iteration":680,"passed_time":0.7301008857,"remaining_time":0.342000268}, +{"learn":[96.08591125],"iteration":681,"passed_time":0.7309289657,"remaining_time":0.3408143857}, +{"learn":[95.96102642],"iteration":682,"passed_time":0.7317194584,"remaining_time":0.3396121059}, +{"learn":[95.73700045],"iteration":683,"passed_time":0.7325573393,"remaining_time":0.3384329228}, +{"learn":[95.59880493],"iteration":684,"passed_time":0.7333243352,"remaining_time":0.3372221395}, +{"learn":[95.57022852],"iteration":685,"passed_time":0.7343001779,"remaining_time":0.3361082447}, +{"learn":[95.54346281],"iteration":686,"passed_time":0.7352199418,"remaining_time":0.334969202}, +{"learn":[95.30037527],"iteration":687,"passed_time":0.7363491443,"remaining_time":0.3339257748}, +{"learn":[95.14140291],"iteration":688,"passed_time":0.7374677013,"remaining_time":0.3328772933}, +{"learn":[95.00647918],"iteration":689,"passed_time":0.7387579238,"remaining_time":0.3319057339}, +{"learn":[94.90320875],"iteration":690,"passed_time":0.7397654413,"remaining_time":0.3308068327}, +{"learn":[94.67619173],"iteration":691,"passed_time":0.7407022241,"remaining_time":0.3296767124}, +{"learn":[94.44599217],"iteration":692,"passed_time":0.7416647744,"remaining_time":0.3285585653}, +{"learn":[94.2643355],"iteration":693,"passed_time":0.7424910113,"remaining_time":0.3273807629}, +{"learn":[94.09580964],"iteration":694,"passed_time":0.7433753447,"remaining_time":0.3262294678}, 
+{"learn":[93.88412934],"iteration":695,"passed_time":0.7442108793,"remaining_time":0.3250576255}, +{"learn":[93.71874897],"iteration":696,"passed_time":0.7449571865,"remaining_time":0.3238479591}, +{"learn":[93.5744453],"iteration":697,"passed_time":0.7457697666,"remaining_time":0.3226682944}, +{"learn":[93.52003371],"iteration":698,"passed_time":0.7464912191,"remaining_time":0.3214504391}, +{"learn":[93.28456464],"iteration":699,"passed_time":0.7472562006,"remaining_time":0.3202526574}, +{"learn":[93.123137],"iteration":700,"passed_time":0.7480259423,"remaining_time":0.3190581409}, +{"learn":[92.85437766],"iteration":701,"passed_time":0.7488495996,"remaining_time":0.3178877218}, +{"learn":[92.82821716],"iteration":702,"passed_time":0.7496345075,"remaining_time":0.3167019185}, +{"learn":[92.80801325],"iteration":703,"passed_time":0.7504061702,"remaining_time":0.3155116852}, +{"learn":[92.67006319],"iteration":704,"passed_time":0.7522964198,"remaining_time":0.3147907005}, +{"learn":[92.45803962],"iteration":705,"passed_time":0.7544592608,"remaining_time":0.3141799188}, +{"learn":[92.26768076],"iteration":706,"passed_time":0.755647225,"remaining_time":0.3131607312}, +{"learn":[92.04517711],"iteration":707,"passed_time":0.7565648516,"remaining_time":0.3120295716}, +{"learn":[92.01934367],"iteration":708,"passed_time":0.7574189065,"remaining_time":0.3108729221}, +{"learn":[91.69217515],"iteration":709,"passed_time":0.7582567878,"remaining_time":0.309710519}, +{"learn":[91.41262654],"iteration":710,"passed_time":0.7590883492,"remaining_time":0.3085464598}, +{"learn":[91.22844365],"iteration":711,"passed_time":0.7598545966,"remaining_time":0.3073569155}, +{"learn":[91.0029696],"iteration":712,"passed_time":0.7605807641,"remaining_time":0.3061524254}, +{"learn":[90.91927956],"iteration":713,"passed_time":0.7613696612,"remaining_time":0.3049744021}, +{"learn":[90.78425174],"iteration":714,"passed_time":0.7621744052,"remaining_time":0.3038037839}, +{"learn":[90.65674763],"iteration":715,"passed_time":0.7629243811,"remaining_time":0.302612464}, +{"learn":[90.53865432],"iteration":716,"passed_time":0.7636874105,"remaining_time":0.3014275274}, +{"learn":[90.39671287],"iteration":717,"passed_time":0.7644594956,"remaining_time":0.3002473228}, +{"learn":[90.24281762],"iteration":718,"passed_time":0.7651869357,"remaining_time":0.2990508052}, +{"learn":[90.10843845],"iteration":719,"passed_time":0.7659531055,"remaining_time":0.2978706521}, +{"learn":[90.07088285],"iteration":720,"passed_time":0.7674854628,"remaining_time":0.2969881333}, +{"learn":[89.75778642],"iteration":721,"passed_time":0.7683590313,"remaining_time":0.2958501533}, +{"learn":[89.44493911],"iteration":722,"passed_time":0.7700823135,"remaining_time":0.2950384521}, +{"learn":[89.33218415],"iteration":723,"passed_time":0.7709464631,"remaining_time":0.293896718}, +{"learn":[89.22294498],"iteration":724,"passed_time":0.7717110863,"remaining_time":0.2927179983}, +{"learn":[88.99462025],"iteration":725,"passed_time":0.7724878397,"remaining_time":0.2915449974}, +{"learn":[88.87335106],"iteration":726,"passed_time":0.7738799594,"remaining_time":0.2906041663}, +{"learn":[88.72574375],"iteration":727,"passed_time":0.7753342048,"remaining_time":0.2896853073}, +{"learn":[88.60158818],"iteration":728,"passed_time":0.7761022227,"remaining_time":0.2885098798}, +{"learn":[88.44917471],"iteration":729,"passed_time":0.7768109005,"remaining_time":0.2873136207}, 
+{"learn":[88.39997405],"iteration":730,"passed_time":0.7777904188,"remaining_time":0.2862183621}, +{"learn":[88.28397782],"iteration":731,"passed_time":0.7784858277,"remaining_time":0.2850194014}, +{"learn":[88.25960648],"iteration":732,"passed_time":0.7791499945,"remaining_time":0.2838104346}, +{"learn":[88.04324091],"iteration":733,"passed_time":0.7798482928,"remaining_time":0.2826153214}, +{"learn":[88.02202762],"iteration":734,"passed_time":0.7805007638,"remaining_time":0.2814050373}, +{"learn":[87.71503579],"iteration":735,"passed_time":0.7811786984,"remaining_time":0.2802054027}, +{"learn":[87.52364778],"iteration":736,"passed_time":0.782508738,"remaining_time":0.2792398889}, +{"learn":[87.28070694],"iteration":737,"passed_time":0.783406132,"remaining_time":0.2781197921}, +{"learn":[87.14627098],"iteration":738,"passed_time":0.7841759659,"remaining_time":0.2769552464}, +{"learn":[87.00704387],"iteration":739,"passed_time":0.7848204866,"remaining_time":0.2757477385}, +{"learn":[86.92646242],"iteration":740,"passed_time":0.7857207956,"remaining_time":0.2746311553}, +{"learn":[86.82729647],"iteration":741,"passed_time":0.7864870216,"remaining_time":0.2734685331}, +{"learn":[86.78045019],"iteration":742,"passed_time":0.7872247434,"remaining_time":0.2722971185}, +{"learn":[86.59043507],"iteration":743,"passed_time":0.7880502179,"remaining_time":0.2711570642}, +{"learn":[86.36661044],"iteration":744,"passed_time":0.7887814279,"remaining_time":0.2699855894}, +{"learn":[86.16361791],"iteration":745,"passed_time":0.789477559,"remaining_time":0.2688033512}, +{"learn":[85.99162242],"iteration":746,"passed_time":0.7901184422,"remaining_time":0.2676037027}, +{"learn":[85.97975009],"iteration":747,"passed_time":0.7907322133,"remaining_time":0.2663964141}, +{"learn":[85.72576134],"iteration":748,"passed_time":0.7914253332,"remaining_time":0.2652173013}, +{"learn":[85.55863112],"iteration":749,"passed_time":0.7920822365,"remaining_time":0.2640274122}, +{"learn":[85.53790883],"iteration":750,"passed_time":0.792725496,"remaining_time":0.2628344188}, +{"learn":[85.48072686],"iteration":751,"passed_time":0.7933780867,"remaining_time":0.2616459648}, +{"learn":[85.33480816],"iteration":752,"passed_time":0.7940203234,"remaining_time":0.2604555377}, +{"learn":[85.21920337],"iteration":753,"passed_time":0.7947161614,"remaining_time":0.2592840527}, +{"learn":[85.1201592],"iteration":754,"passed_time":0.7953870903,"remaining_time":0.2581057445}, +{"learn":[84.98406257],"iteration":755,"passed_time":0.7961225585,"remaining_time":0.2569496088}, +{"learn":[84.84864168],"iteration":756,"passed_time":0.7968045294,"remaining_time":0.2557774117}, +{"learn":[84.83176765],"iteration":757,"passed_time":0.7974547767,"remaining_time":0.2545963799}, +{"learn":[84.63682584],"iteration":758,"passed_time":0.7991320911,"remaining_time":0.2537428642}, +{"learn":[84.45718166],"iteration":759,"passed_time":0.8000772294,"remaining_time":0.2526559672}, +{"learn":[84.2623255],"iteration":760,"passed_time":0.8008348466,"remaining_time":0.2515105497}, +{"learn":[84.06250083],"iteration":761,"passed_time":0.8016925129,"remaining_time":0.2503973991}, +{"learn":[83.98742891],"iteration":762,"passed_time":0.8023711408,"remaining_time":0.2492293059}, +{"learn":[83.9654573],"iteration":763,"passed_time":0.8030082421,"remaining_time":0.2480496664}, +{"learn":[83.80895885],"iteration":764,"passed_time":0.8037511763,"remaining_time":0.2469039561}, 
+{"learn":[83.68567772],"iteration":765,"passed_time":0.8044075701,"remaining_time":0.2457328608}, +{"learn":[83.50718495],"iteration":766,"passed_time":0.8050305809,"remaining_time":0.2445529666}, +{"learn":[83.46609007],"iteration":767,"passed_time":0.8056858978,"remaining_time":0.2433842816}, +{"learn":[83.45135397],"iteration":768,"passed_time":0.8064015187,"remaining_time":0.2422350466}, +{"learn":[83.2363417],"iteration":769,"passed_time":0.8070674702,"remaining_time":0.2410721015}, +{"learn":[83.17236547],"iteration":770,"passed_time":0.8077756178,"remaining_time":0.2399229786}, +{"learn":[82.91317357],"iteration":771,"passed_time":0.8084010165,"remaining_time":0.2387505593}, +{"learn":[82.79827798],"iteration":772,"passed_time":0.8090883316,"remaining_time":0.2375977377}, +{"learn":[82.70452039],"iteration":773,"passed_time":0.8097587577,"remaining_time":0.2364411876}, +{"learn":[82.54522036],"iteration":774,"passed_time":0.8104633095,"remaining_time":0.2352957995}, +{"learn":[82.36753462],"iteration":775,"passed_time":0.8111927634,"remaining_time":0.2341587358}, +{"learn":[82.14133457],"iteration":776,"passed_time":0.8118758723,"remaining_time":0.2330094202}, +{"learn":[81.94908304],"iteration":777,"passed_time":0.8125830983,"remaining_time":0.2318681849}, +{"learn":[81.78425609],"iteration":778,"passed_time":0.8135222369,"remaining_time":0.2307938567}, +{"learn":[81.74852642],"iteration":779,"passed_time":0.8142539124,"remaining_time":0.2296613599}, +{"learn":[81.55502016],"iteration":780,"passed_time":0.8149583268,"remaining_time":0.2285222453}, +{"learn":[81.3451698],"iteration":781,"passed_time":0.8157531802,"remaining_time":0.2274094543}, +{"learn":[81.31970895],"iteration":782,"passed_time":0.8164414866,"remaining_time":0.2262679471}, +{"learn":[81.27369229],"iteration":783,"passed_time":0.8171602207,"remaining_time":0.2251359792}, +{"learn":[81.21918123],"iteration":784,"passed_time":0.8178554761,"remaining_time":0.2239986336}, +{"learn":[80.9907386],"iteration":785,"passed_time":0.8185843392,"remaining_time":0.2228715631}, +{"learn":[80.83250622],"iteration":786,"passed_time":0.8193405548,"remaining_time":0.2217529075}, +{"learn":[80.70867401],"iteration":787,"passed_time":0.8200165908,"remaining_time":0.2206136006}, +{"learn":[80.5228913],"iteration":788,"passed_time":0.8206580726,"remaining_time":0.2194662273}, +{"learn":[80.41817098],"iteration":789,"passed_time":0.8213540773,"remaining_time":0.2183346281}, +{"learn":[80.38770927],"iteration":790,"passed_time":0.8219971074,"remaining_time":0.2171901333}, +{"learn":[80.20767199],"iteration":791,"passed_time":0.8227280532,"remaining_time":0.2160699938}, +{"learn":[80.09246156],"iteration":792,"passed_time":0.8234441046,"remaining_time":0.2149469478}, +{"learn":[79.91825023],"iteration":793,"passed_time":0.8241219904,"remaining_time":0.2138150252}, +{"learn":[79.88651678],"iteration":794,"passed_time":0.8247912727,"remaining_time":0.2126820263}, +{"learn":[79.7107924],"iteration":795,"passed_time":0.8254260754,"remaining_time":0.211541356}, +{"learn":[79.59117874],"iteration":796,"passed_time":0.8261375891,"remaining_time":0.2104214938}, +{"learn":[79.42415425],"iteration":797,"passed_time":0.8268592834,"remaining_time":0.2093052322}, +{"learn":[79.38917271],"iteration":798,"passed_time":0.8275419935,"remaining_time":0.2081801511}, +{"learn":[79.31480069],"iteration":799,"passed_time":0.8283727242,"remaining_time":0.2070931811}, 
+{"learn":[79.25863184],"iteration":800,"passed_time":0.8292364277,"remaining_time":0.2060150426}, +{"learn":[79.19776292],"iteration":801,"passed_time":0.8299963609,"remaining_time":0.2049118198}, +{"learn":[79.07016322],"iteration":802,"passed_time":0.830779124,"remaining_time":0.2038150528}, +{"learn":[78.95728068],"iteration":803,"passed_time":0.831467153,"remaining_time":0.2026959726}, +{"learn":[78.882104],"iteration":804,"passed_time":0.832154421,"remaining_time":0.201577779}, +{"learn":[78.78759484],"iteration":805,"passed_time":0.832818456,"remaining_time":0.2004550626}, +{"learn":[78.65679994],"iteration":806,"passed_time":0.8334875849,"remaining_time":0.1993347012}, +{"learn":[78.470072],"iteration":807,"passed_time":0.8341785335,"remaining_time":0.1982206416}, +{"learn":[78.33314013],"iteration":808,"passed_time":0.8348443564,"remaining_time":0.197101696}, +{"learn":[78.19015712],"iteration":809,"passed_time":0.8355431179,"remaining_time":0.1959915956}, +{"learn":[78.13812619],"iteration":810,"passed_time":0.8362526735,"remaining_time":0.194885025}, +{"learn":[78.01360261],"iteration":811,"passed_time":0.8368585529,"remaining_time":0.1937554285}, +{"learn":[77.84131133],"iteration":812,"passed_time":0.8375499049,"remaining_time":0.1926467801}, +{"learn":[77.7753725],"iteration":813,"passed_time":0.8382422419,"remaining_time":0.1915393821}, +{"learn":[77.58358531],"iteration":814,"passed_time":0.838930737,"remaining_time":0.1904321305}, +{"learn":[77.35581633],"iteration":815,"passed_time":0.8395912079,"remaining_time":0.1893195861}, +{"learn":[77.23815501],"iteration":816,"passed_time":0.8402820507,"remaining_time":0.1882149514}, +{"learn":[77.19202898],"iteration":817,"passed_time":0.8409298923,"remaining_time":0.1871017609}, +{"learn":[77.08471226],"iteration":818,"passed_time":0.8416619345,"remaining_time":0.1860083152}, +{"learn":[76.96045327],"iteration":819,"passed_time":0.842360304,"remaining_time":0.1849083594}, +{"learn":[76.75096178],"iteration":820,"passed_time":0.8430602427,"remaining_time":0.183809724}, +{"learn":[76.73002244],"iteration":821,"passed_time":0.8437103123,"remaining_time":0.1827012598}, +{"learn":[76.46183148],"iteration":822,"passed_time":0.8447660471,"remaining_time":0.1816811547}, +{"learn":[76.31784696],"iteration":823,"passed_time":0.8456389126,"remaining_time":0.1806219037}, +{"learn":[76.25853616],"iteration":824,"passed_time":0.8463293869,"remaining_time":0.1795244154}, +{"learn":[76.15881617],"iteration":825,"passed_time":0.8472386048,"remaining_time":0.1784739918}, +{"learn":[76.14653618],"iteration":826,"passed_time":0.8478933089,"remaining_time":0.177370668}, +{"learn":[76.05114431],"iteration":827,"passed_time":0.8485404557,"remaining_time":0.176266858}, +{"learn":[75.93230304],"iteration":828,"passed_time":0.8492272687,"remaining_time":0.1751723317}, +{"learn":[75.76477165],"iteration":829,"passed_time":0.8500893818,"remaining_time":0.1741146927}, +{"learn":[75.64025906],"iteration":830,"passed_time":0.8507705693,"remaining_time":0.1730207295}, +{"learn":[75.42114584],"iteration":831,"passed_time":0.8514858996,"remaining_time":0.1719346528}, +{"learn":[75.35767403],"iteration":832,"passed_time":0.8521754818,"remaining_time":0.1708443043}, +{"learn":[75.25180038],"iteration":833,"passed_time":0.8528217651,"remaining_time":0.1697462986}, +{"learn":[75.23953429],"iteration":834,"passed_time":0.8536205572,"remaining_time":0.1686795113}, +{"learn":[75.20358883],"iteration":835,"passed_time":0.8542666059,"remaining_time":0.1675834012}, 
+{"learn":[75.12525159],"iteration":836,"passed_time":0.8549294078,"remaining_time":0.166491629}, +{"learn":[74.94534918],"iteration":837,"passed_time":0.8556362425,"remaining_time":0.1654093929}, +{"learn":[74.77815877],"iteration":838,"passed_time":0.8563240935,"remaining_time":0.1643244089}, +{"learn":[74.74762084],"iteration":839,"passed_time":0.857027435,"remaining_time":0.1632433209}, +{"learn":[74.73919979],"iteration":840,"passed_time":0.8577018304,"remaining_time":0.1621576588}, +{"learn":[74.57390243],"iteration":841,"passed_time":0.8583593942,"remaining_time":0.1610698151}, +{"learn":[74.43510998],"iteration":842,"passed_time":0.8590674706,"remaining_time":0.1599923996}, +{"learn":[74.36709609],"iteration":843,"passed_time":0.8605232764,"remaining_time":0.1590540653}, +{"learn":[74.23879576],"iteration":844,"passed_time":0.861291259,"remaining_time":0.1579883374}, +{"learn":[74.15068154],"iteration":845,"passed_time":0.8619831498,"remaining_time":0.1569094622}, +{"learn":[74.11282123],"iteration":846,"passed_time":0.8630094467,"remaining_time":0.1558919071}, +{"learn":[73.97895275],"iteration":847,"passed_time":0.863735637,"remaining_time":0.1548205387}, +{"learn":[73.86385892],"iteration":848,"passed_time":0.8644105897,"remaining_time":0.1537408705}, +{"learn":[73.80631361],"iteration":849,"passed_time":0.8650716624,"remaining_time":0.1526597051}, +{"learn":[73.6612263],"iteration":850,"passed_time":0.8658253431,"remaining_time":0.1515957416}, +{"learn":[73.57407119],"iteration":851,"passed_time":0.8664884555,"remaining_time":0.150516774}, +{"learn":[73.49557136],"iteration":852,"passed_time":0.8671150282,"remaining_time":0.1494324844}, +{"learn":[73.30607327],"iteration":853,"passed_time":0.8678588755,"remaining_time":0.148369316}, +{"learn":[73.14332929],"iteration":854,"passed_time":0.8685285919,"remaining_time":0.1472943226}, +{"learn":[73.00226481],"iteration":855,"passed_time":0.8692241209,"remaining_time":0.1462246185}, +{"learn":[72.92531658],"iteration":856,"passed_time":0.869962124,"remaining_time":0.1451628748}, +{"learn":[72.80699591],"iteration":857,"passed_time":0.8706431655,"remaining_time":0.1440924586}, +{"learn":[72.66490284],"iteration":858,"passed_time":0.8713633945,"remaining_time":0.1430293814}, +{"learn":[72.54722229],"iteration":859,"passed_time":0.8720572694,"remaining_time":0.1419628113}, +{"learn":[72.41389546],"iteration":860,"passed_time":0.8727823889,"remaining_time":0.140902151}, +{"learn":[72.31275977],"iteration":861,"passed_time":0.8734683679,"remaining_time":0.1398360032}, +{"learn":[72.12873668],"iteration":862,"passed_time":0.8741500756,"remaining_time":0.1387700583}, +{"learn":[72.0770661],"iteration":863,"passed_time":0.8755559253,"remaining_time":0.1378189882}, +{"learn":[72.01171287],"iteration":864,"passed_time":0.8762695165,"remaining_time":0.1367588263}, +{"learn":[71.99478489],"iteration":865,"passed_time":0.8769081478,"remaining_time":0.1356878658}, +{"learn":[71.87438088],"iteration":866,"passed_time":0.8778788362,"remaining_time":0.1346688411}, +{"learn":[71.7308027],"iteration":867,"passed_time":0.8785788152,"remaining_time":0.1336087599}, +{"learn":[71.69869645],"iteration":868,"passed_time":0.8792190906,"remaining_time":0.1325405073}, +{"learn":[71.68217077],"iteration":869,"passed_time":0.8798753882,"remaining_time":0.1314756327}, +{"learn":[71.61534523],"iteration":870,"passed_time":0.8805353724,"remaining_time":0.1304122423}, +{"learn":[71.55238737],"iteration":871,"passed_time":0.8812044393,"remaining_time":0.1293511104}, 
+{"learn":[71.42625749],"iteration":872,"passed_time":0.8818514056,"remaining_time":0.1282876615}, +{"learn":[71.33376108],"iteration":873,"passed_time":0.8824878488,"remaining_time":0.1272236487}, +{"learn":[71.13850167],"iteration":874,"passed_time":0.8831537006,"remaining_time":0.1261648144}, +{"learn":[71.03971411],"iteration":875,"passed_time":0.8838531158,"remaining_time":0.1251116283}, +{"learn":[70.87204535],"iteration":876,"passed_time":0.8845494545,"remaining_time":0.1240588175}, +{"learn":[70.80240958],"iteration":877,"passed_time":0.8852588595,"remaining_time":0.1230086342}, +{"learn":[70.68111697],"iteration":878,"passed_time":0.8859428294,"remaining_time":0.1219557251}, +{"learn":[70.61825473],"iteration":879,"passed_time":0.886606497,"remaining_time":0.120900886}, +{"learn":[70.57259726],"iteration":880,"passed_time":0.8873284875,"remaining_time":0.1198548127}, +{"learn":[70.38983888],"iteration":881,"passed_time":0.8880169417,"remaining_time":0.1188049877}, +{"learn":[70.38136988],"iteration":882,"passed_time":0.8886958445,"remaining_time":0.1177547155}, +{"learn":[70.296864],"iteration":883,"passed_time":0.8894995937,"remaining_time":0.1167216661}, +{"learn":[70.22374833],"iteration":884,"passed_time":0.8901712084,"remaining_time":0.1156719649}, +{"learn":[70.01301989],"iteration":885,"passed_time":0.89166124,"remaining_time":0.1147284214}, +{"learn":[69.9547265],"iteration":886,"passed_time":0.8923926654,"remaining_time":0.1136870025}, +{"learn":[69.80368793],"iteration":887,"passed_time":0.8930733576,"remaining_time":0.1126398829}, +{"learn":[69.66682294],"iteration":888,"passed_time":0.8941683101,"remaining_time":0.1116453121}, +{"learn":[69.58674446],"iteration":889,"passed_time":0.8949054665,"remaining_time":0.1106062936}, +{"learn":[69.53078637],"iteration":890,"passed_time":0.8955749632,"remaining_time":0.1095596756}, +{"learn":[69.47245364],"iteration":891,"passed_time":0.8962605039,"remaining_time":0.1085158458}, +{"learn":[69.2892044],"iteration":892,"passed_time":0.8969509208,"remaining_time":0.1074734026}, +{"learn":[69.21812646],"iteration":893,"passed_time":0.8976354213,"remaining_time":0.1064310455}, +{"learn":[69.10278435],"iteration":894,"passed_time":0.8983301458,"remaining_time":0.1053906875}, +{"learn":[68.97944608],"iteration":895,"passed_time":0.8989957156,"remaining_time":0.104347717}, +{"learn":[68.90118676],"iteration":896,"passed_time":0.8997058135,"remaining_time":0.103310701}, +{"learn":[68.78754012],"iteration":897,"passed_time":0.9003670148,"remaining_time":0.1022688591}, +{"learn":[68.7781134],"iteration":898,"passed_time":0.9010218411,"remaining_time":0.1012271479}, +{"learn":[68.63161424],"iteration":899,"passed_time":0.9017645741,"remaining_time":0.1001960638}, +{"learn":[68.55341997],"iteration":900,"passed_time":0.902452179,"remaining_time":0.0991595624}, +{"learn":[68.44375851],"iteration":901,"passed_time":0.9031085646,"remaining_time":0.09812044271}, +{"learn":[68.26619185],"iteration":902,"passed_time":0.9037683718,"remaining_time":0.09708253828}, +{"learn":[68.17387821],"iteration":903,"passed_time":0.9044385148,"remaining_time":0.09604656794}, +{"learn":[68.1067364],"iteration":904,"passed_time":0.9051216536,"remaining_time":0.09501277027}, +{"learn":[68.0037676],"iteration":905,"passed_time":0.9057690433,"remaining_time":0.09397603761}, +{"learn":[67.84595199],"iteration":906,"passed_time":0.9066296538,"remaining_time":0.09296202624}, +{"learn":[67.72164019],"iteration":907,"passed_time":0.9076292202,"remaining_time":0.091962432}, 
+{"learn":[67.59821903],"iteration":908,"passed_time":0.9085450517,"remaining_time":0.09095445513}, +{"learn":[67.55280532],"iteration":909,"passed_time":0.9092772065,"remaining_time":0.08992851493}, +{"learn":[67.45436784],"iteration":910,"passed_time":0.9099342369,"remaining_time":0.08889588045}, +{"learn":[67.31968997],"iteration":911,"passed_time":0.9105607855,"remaining_time":0.08786112842}, +{"learn":[67.13815956],"iteration":912,"passed_time":0.9112092196,"remaining_time":0.08682935608}, +{"learn":[67.03332528],"iteration":913,"passed_time":0.9119342508,"remaining_time":0.08580562972}, +{"learn":[66.93924111],"iteration":914,"passed_time":0.912633827,"remaining_time":0.08478019158}, +{"learn":[66.91058087],"iteration":915,"passed_time":0.9132997579,"remaining_time":0.08375237954}, +{"learn":[66.89843724],"iteration":916,"passed_time":0.9139534339,"remaining_time":0.08272424756}, +{"learn":[66.84815053],"iteration":917,"passed_time":0.914614721,"remaining_time":0.08169761125}, +{"learn":[66.83475141],"iteration":918,"passed_time":0.9152999394,"remaining_time":0.08067387932}, +{"learn":[66.71827241],"iteration":919,"passed_time":0.9160325667,"remaining_time":0.0796550058}, +{"learn":[66.67554343],"iteration":920,"passed_time":0.9166899461,"remaining_time":0.07863029939}, +{"learn":[66.50290341],"iteration":921,"passed_time":0.9173597516,"remaining_time":0.07760744102}, +{"learn":[66.41092302],"iteration":922,"passed_time":0.918008473,"remaining_time":0.07658358876}, +{"learn":[66.37060972],"iteration":923,"passed_time":0.9186635617,"remaining_time":0.07556107217}, +{"learn":[66.27378639],"iteration":924,"passed_time":0.9193680829,"remaining_time":0.07454335807}, +{"learn":[66.22297863],"iteration":925,"passed_time":0.9200759325,"remaining_time":0.0735265864}, +{"learn":[66.21378748],"iteration":926,"passed_time":0.9207209919,"remaining_time":0.07250553658}, +{"learn":[66.18750052],"iteration":927,"passed_time":0.9223280861,"remaining_time":0.07155993772}, +{"learn":[66.1067673],"iteration":928,"passed_time":0.9231316024,"remaining_time":0.07055150029}, +{"learn":[66.06653338],"iteration":929,"passed_time":0.9238171213,"remaining_time":0.06953462204}, +{"learn":[66.05436861],"iteration":930,"passed_time":0.924787887,"remaining_time":0.06853959635}, +{"learn":[65.99407717],"iteration":931,"passed_time":0.9254952683,"remaining_time":0.06752540584}, +{"learn":[65.93399844],"iteration":932,"passed_time":0.9261641312,"remaining_time":0.06650910696}, +{"learn":[65.8235334],"iteration":933,"passed_time":0.926836201,"remaining_time":0.06549377866}, +{"learn":[65.73604072],"iteration":934,"passed_time":0.9275453895,"remaining_time":0.06448176505}, +{"learn":[65.61843396],"iteration":935,"passed_time":0.928207517,"remaining_time":0.06346718065}, +{"learn":[65.59111518],"iteration":936,"passed_time":0.9288786774,"remaining_time":0.06245395589}, +{"learn":[65.49802554],"iteration":937,"passed_time":0.9295829265,"remaining_time":0.0614436476}, +{"learn":[65.44474624],"iteration":938,"passed_time":0.9303085666,"remaining_time":0.06043538079}, +{"learn":[65.43646462],"iteration":939,"passed_time":0.9309693488,"remaining_time":0.05942357546}, +{"learn":[65.42790969],"iteration":940,"passed_time":0.931640897,"remaining_time":0.0584131912}, +{"learn":[65.4183837],"iteration":941,"passed_time":0.9323281625,"remaining_time":0.05740449408}, +{"learn":[65.31026144],"iteration":942,"passed_time":0.9330164646,"remaining_time":0.05639654134}, 
+{"learn":[65.24153336],"iteration":943,"passed_time":0.9336907842,"remaining_time":0.05538843635}, +{"learn":[65.06807618],"iteration":944,"passed_time":0.934371737,"remaining_time":0.05438142385}, +{"learn":[64.93037422],"iteration":945,"passed_time":0.9350472342,"remaining_time":0.05337478927}, +{"learn":[64.7980956],"iteration":946,"passed_time":0.9357242305,"remaining_time":0.05236893793}, +{"learn":[64.62992858],"iteration":947,"passed_time":0.9367434177,"remaining_time":0.05138255034}, +{"learn":[64.62299463],"iteration":948,"passed_time":0.9374845798,"remaining_time":0.05038115234}, +{"learn":[64.5323349],"iteration":949,"passed_time":0.9382463575,"remaining_time":0.04938138724}, +{"learn":[64.34052541],"iteration":950,"passed_time":0.9388725834,"remaining_time":0.04837513837}, +{"learn":[64.2069101],"iteration":951,"passed_time":0.9395657141,"remaining_time":0.04737306122}, +{"learn":[64.15975141],"iteration":952,"passed_time":0.9405498281,"remaining_time":0.04638598313}, +{"learn":[64.03350651],"iteration":953,"passed_time":0.9412621053,"remaining_time":0.04538580382}, +{"learn":[63.90578693],"iteration":954,"passed_time":0.9420031131,"remaining_time":0.04438758125}, +{"learn":[63.87886555],"iteration":955,"passed_time":0.9426550037,"remaining_time":0.04338579515}, +{"learn":[63.83229954],"iteration":956,"passed_time":0.9433073082,"remaining_time":0.04238475888}, +{"learn":[63.7567763],"iteration":957,"passed_time":0.943974997,"remaining_time":0.04138512513}, +{"learn":[63.68328044],"iteration":958,"passed_time":0.9446794015,"remaining_time":0.04038775335}, +{"learn":[63.55163865],"iteration":959,"passed_time":0.945302633,"remaining_time":0.03938760971}, +{"learn":[63.48732217],"iteration":960,"passed_time":0.9459853001,"remaining_time":0.03839066254}, +{"learn":[63.42671472],"iteration":961,"passed_time":0.9466663113,"remaining_time":0.03739430336}, +{"learn":[63.3626304],"iteration":962,"passed_time":0.9473322606,"remaining_time":0.0363980204}, +{"learn":[63.33717356],"iteration":963,"passed_time":0.9479984689,"remaining_time":0.03540243245}, +{"learn":[63.3206294],"iteration":964,"passed_time":0.9486610468,"remaining_time":0.03440739548}, +{"learn":[63.30827576],"iteration":965,"passed_time":0.9493311151,"remaining_time":0.03341331047}, +{"learn":[63.23982978],"iteration":966,"passed_time":0.9499752679,"remaining_time":0.03241901121}, +{"learn":[63.17613845],"iteration":967,"passed_time":0.9506272401,"remaining_time":0.03142569389}, +{"learn":[63.07928415],"iteration":968,"passed_time":0.9512932974,"remaining_time":0.0304335317}, +{"learn":[62.95135955],"iteration":969,"passed_time":0.9523572382,"remaining_time":0.02945434757}, +{"learn":[62.87113745],"iteration":970,"passed_time":0.9530184021,"remaining_time":0.02846295949}, +{"learn":[62.82066775],"iteration":971,"passed_time":0.953659291,"remaining_time":0.02747166682}, +{"learn":[62.74025346],"iteration":972,"passed_time":0.9542564409,"remaining_time":0.02647988068}, +{"learn":[62.62144085],"iteration":973,"passed_time":0.9553690223,"remaining_time":0.02550266384}, +{"learn":[62.48589189],"iteration":974,"passed_time":0.9562045606,"remaining_time":0.02451806566}, +{"learn":[62.40194554],"iteration":975,"passed_time":0.9570816583,"remaining_time":0.02353479488}, +{"learn":[62.33991243],"iteration":976,"passed_time":0.9578284472,"remaining_time":0.02254867378}, +{"learn":[62.27653795],"iteration":977,"passed_time":0.9584744168,"remaining_time":0.0215607742}, 
+{"learn":[62.17473657],"iteration":978,"passed_time":0.9591486392,"remaining_time":0.02057417919}, +{"learn":[62.1322206],"iteration":979,"passed_time":0.9598025346,"remaining_time":0.01958780683}, +{"learn":[62.02976272],"iteration":980,"passed_time":0.9604934913,"remaining_time":0.01860283011}, +{"learn":[61.9294509],"iteration":981,"passed_time":0.9611682927,"remaining_time":0.01761815608}, +{"learn":[61.73615413],"iteration":982,"passed_time":0.9618175661,"remaining_time":0.01663367103}, +{"learn":[61.67722996],"iteration":983,"passed_time":0.9627553373,"remaining_time":0.01565455833}, +{"learn":[61.65371826],"iteration":984,"passed_time":0.9634245559,"remaining_time":0.01467143994}, +{"learn":[61.51917595],"iteration":985,"passed_time":0.9641344901,"remaining_time":0.01368953637}, +{"learn":[61.44883623],"iteration":986,"passed_time":0.964788122,"remaining_time":0.01270744234}, +{"learn":[61.40103852],"iteration":987,"passed_time":0.9654705323,"remaining_time":0.01172636274}, +{"learn":[61.39355022],"iteration":988,"passed_time":0.9661385238,"remaining_time":0.01074572676}, +{"learn":[61.28668955],"iteration":989,"passed_time":0.9676003994,"remaining_time":0.009773741409}, +{"learn":[61.24210144],"iteration":990,"passed_time":0.9683493698,"remaining_time":0.008794292964}, +{"learn":[61.15201253],"iteration":991,"passed_time":0.9691064685,"remaining_time":0.007815374746}, +{"learn":[61.13945445],"iteration":992,"passed_time":0.9698210421,"remaining_time":0.006836603519}, +{"learn":[61.10391857],"iteration":993,"passed_time":0.9707439249,"remaining_time":0.005859621277}, +{"learn":[61.06796975],"iteration":994,"passed_time":0.971422968,"remaining_time":0.004881522452}, +{"learn":[61.035241],"iteration":995,"passed_time":0.9721036819,"remaining_time":0.003904030851}, +{"learn":[60.98057681],"iteration":996,"passed_time":0.9743745718,"remaining_time":0.002931919474}, +{"learn":[60.92399428],"iteration":997,"passed_time":0.975100777,"remaining_time":0.001954109774}, +{"learn":[60.843292],"iteration":998,"passed_time":0.9758236521,"remaining_time":0.0009768004526}, +{"learn":[60.74726042],"iteration":999,"passed_time":0.976584764,"remaining_time":0} ]} \ No newline at end of file diff --git a/catboost_info/learn/events.out.tfevents b/catboost_info/learn/events.out.tfevents index 75c9f5d..aba366b 100644 Binary files a/catboost_info/learn/events.out.tfevents and b/catboost_info/learn/events.out.tfevents differ diff --git a/catboost_info/learn_error.tsv b/catboost_info/learn_error.tsv index 7382838..29c3c44 100644 --- a/catboost_info/learn_error.tsv +++ b/catboost_info/learn_error.tsv @@ -1,1001 +1,1001 @@ iter RMSE -0 3662.575683 -1 3555.636322 -2 3450.779167 -3 3350.517801 -4 3255.662239 -5 3168.341562 -6 3083.09696 -7 3001.252663 -8 2917.435051 -9 2837.145765 -10 2766.450459 -11 2688.914783 -12 2614.472366 -13 2548.570089 -14 2490.007404 -15 2428.190922 -16 2363.192483 -17 2309.161851 -18 2251.326317 -19 2194.518603 -20 2134.386896 -21 2077.449324 -22 2027.220285 -23 1984.605605 -24 1931.101868 -25 1893.116137 -26 1842.931837 -27 1795.250205 -28 1751.063766 -29 1712.884824 -30 1667.80013 -31 1630.782543 -32 1590.474703 -33 1551.389736 -34 1516.435531 -35 1484.889881 -36 1452.681436 -37 1419.998505 -38 1389.412263 -39 1363.812251 -40 1340.371116 -41 1309.517514 -42 1275.195616 -43 1246.278496 -44 1218.451233 -45 1198.54566 -46 1176.814775 -47 1153.401899 -48 1129.163213 -49 1106.535124 -50 1080.936692 -51 1064.986137 -52 1045.057952 -53 1025.324028 -54 1006.478968 -55 987.1865749 -56 
975.2369814 -57 955.5744159 -58 941.1777923 -59 926.5090454 -60 912.6126438 -61 895.292879 -62 878.3101311 -63 862.0548494 -64 846.6412955 -65 831.7313933 -66 816.1886926 -67 804.041348 -68 790.9289148 -69 780.494202 -70 765.6299333 -71 754.0364757 -72 742.6843646 -73 733.6486191 -74 724.4082592 -75 714.3612708 -76 703.7413048 -77 692.6766437 -78 682.658587 -79 672.2667398 -80 662.2697796 -81 653.6006303 -82 647.5304061 -83 640.5592447 -84 630.0335231 -85 625.4874873 -86 617.861395 -87 611.4852683 -88 604.7049078 -89 599.1922856 -90 591.5441643 -91 585.3771005 -92 579.0166045 -93 571.6345746 -94 563.7088364 -95 558.8177793 -96 552.8642371 -97 545.7509079 -98 539.9432238 -99 534.1401375 -100 527.7113585 -101 523.0913521 -102 518.1808209 -103 515.1524114 -104 510.9040838 -105 507.4108072 -106 503.8051705 -107 499.9996939 -108 494.3140036 -109 490.6963987 -110 486.4031605 -111 482.2785595 -112 479.8949034 -113 477.481256 -114 473.4848601 -115 470.8859057 -116 467.34591 -117 461.7794319 -118 458.8907088 -119 455.6145374 -120 452.7000667 -121 449.6290258 -122 446.554963 -123 444.1306578 -124 442.339996 -125 438.6627661 -126 436.8974745 -127 435.2446235 -128 433.1007063 -129 428.7936747 -130 425.4447815 -131 422.4564141 -132 420.3704177 -133 418.3308567 -134 416.355286 -135 413.2683151 -136 409.2783872 -137 407.2469786 -138 404.9461056 -139 402.902743 -140 401.1939857 -141 399.2151519 -142 396.7982124 -143 395.1551058 -144 393.8379304 -145 392.1757867 -146 390.0361905 -147 387.759463 -148 385.5603947 -149 383.4831287 -150 380.7027889 -151 379.1509263 -152 377.5054091 -153 376.3544887 -154 374.7216306 -155 371.8227673 -156 368.8293011 -157 367.3554223 -158 365.6957606 -159 363.4291725 -160 362.1293855 -161 359.8814575 -162 357.4070103 -163 355.2431096 -164 352.552065 -165 349.7138766 -166 347.4891863 -167 345.5489662 -168 341.5699121 -169 338.2907206 -170 336.0732474 -171 334.6253265 -172 333.6199762 -173 331.927306 -174 330.7664346 -175 327.600505 -176 325.6325653 -177 324.3582431 -178 323.0986648 -179 321.3015869 -180 319.9619277 -181 318.5357961 -182 317.3093454 -183 315.6279442 -184 313.5985253 -185 312.2128824 -186 310.5408064 -187 308.8097527 -188 307.444303 -189 305.5703591 -190 304.1410151 -191 302.5345375 -192 301.645134 -193 300.4121176 -194 298.5525362 -195 296.9420221 -196 295.9578764 -197 295.0625029 -198 294.1030487 -199 293.6409424 -200 292.418178 -201 290.7077158 -202 289.7255389 -203 288.2508488 -204 286.7868578 -205 286.086537 -206 285.2264005 -207 284.0864334 -208 282.8510716 -209 282.0865003 -210 281.1298771 -211 280.1903001 -212 279.0970471 -213 277.9367672 -214 276.7751341 -215 275.9816879 -216 275.0941078 -217 274.0984843 -218 273.783232 -219 272.2569337 -220 271.4275366 -221 268.932575 -222 267.6229378 -223 266.5785284 -224 265.398546 -225 263.8963923 -226 262.8240575 -227 261.7816442 -228 260.8591568 -229 259.7074649 -230 258.3753881 -231 257.5815782 -232 256.6962939 -233 255.5210063 -234 254.8841884 -235 254.0187639 -236 252.9263803 -237 251.9325468 -238 250.643096 -239 249.6721803 -240 248.7723228 -241 248.0043812 -242 247.3321548 -243 246.7967875 -244 246.0910573 -245 243.8962219 -246 242.8598926 -247 242.4047449 -248 241.8658878 -249 241.2068841 -250 240.3944328 -251 239.8042636 -252 239.2380851 -253 238.7913906 -254 237.7871852 -255 237.4230529 -256 236.9503851 -257 236.5353735 -258 236.0385457 -259 235.1681646 -260 233.9263754 -261 233.4651113 -262 232.8769228 -263 232.3156672 -264 231.9137269 -265 231.4086531 -266 230.7801018 -267 230.3125671 -268 229.8171138 -269 
228.9896699 -270 228.1907234 -271 227.5037554 -272 227.0812216 -273 226.531358 -274 225.9401478 -275 225.5392312 -276 224.4404722 -277 222.9875036 -278 222.0576336 -279 221.4001301 -280 220.6016936 -281 220.0410548 -282 219.6337237 -283 219.3044009 -284 218.776607 -285 217.8416864 -286 217.4529483 -287 217.0905909 -288 216.0690431 -289 215.7059666 -290 215.2980626 -291 214.6205749 -292 214.3146893 -293 213.4974494 -294 213.0350907 -295 212.6353058 -296 211.3935718 -297 210.420639 -298 209.6471807 -299 209.321792 -300 208.5385184 -301 207.4529122 -302 207.1634334 -303 206.7215229 -304 205.8254902 -305 204.9402971 -306 204.2401925 -307 203.830478 -308 203.4878609 -309 202.7358657 -310 201.6776776 -311 201.2502754 -312 200.4058849 -313 200.0863728 -314 199.4424797 -315 198.434697 -316 197.6092453 -317 197.4347459 -318 197.1730434 -319 196.3205483 -320 196.1442192 -321 195.1358489 -322 194.7654394 -323 194.5257029 -324 194.3460045 -325 193.5423137 -326 193.361594 -327 192.8609113 -328 192.0152368 -329 191.2984363 -330 190.7528647 -331 190.0073779 -332 189.1785006 -333 188.5504856 -334 188.0987857 -335 187.3100528 -336 187.0695173 -337 186.3384284 -338 185.8397918 -339 185.5281498 -340 185.0772929 -341 184.3896485 -342 183.9436706 -343 182.943391 -344 182.1191152 -345 181.9430507 -346 181.2225343 -347 180.7098556 -348 180.1134451 -349 179.9030133 -350 179.5222111 -351 178.6909028 -352 178.3826558 -353 178.0647718 -354 177.0587936 -355 176.7722999 -356 176.4841216 -357 176.0194343 -358 175.7636755 -359 175.0494995 -360 174.8436133 -361 174.5320971 -362 173.8910897 -363 173.2930522 -364 172.7834317 -365 172.5451464 -366 172.2138011 -367 171.2132824 -368 171.050042 -369 170.643188 -370 170.2444386 -371 169.6615289 -372 169.2398256 -373 169.1089687 -374 168.3913973 -375 168.0109707 -376 167.6623955 -377 167.4718941 -378 167.1617279 -379 166.8382257 -380 166.5470885 -381 165.9805333 -382 165.8072213 -383 165.2007912 -384 164.7921382 -385 164.5110541 -386 163.8643792 -387 163.164661 -388 162.7088389 -389 162.0327601 -390 161.7854959 -391 161.3755056 -392 160.7323132 -393 160.6155798 -394 160.3386438 -395 159.8353012 -396 159.1391019 -397 158.4855614 -398 158.0854374 -399 157.8456813 -400 157.4880514 -401 156.9881906 -402 156.5677653 -403 156.3574827 -404 156.2451016 -405 155.9302814 -406 155.3032846 -407 154.4769299 -408 154.2897312 -409 154.1934428 -410 153.5724007 -411 153.0736612 -412 152.9564476 -413 152.3993793 -414 151.7975618 -415 151.2709445 -416 150.9104339 -417 150.3215665 -418 149.8387959 -419 149.54926 -420 149.1502195 -421 148.9274928 -422 148.6005806 -423 148.2908845 -424 147.9269959 -425 147.4632561 -426 146.8663669 -427 146.5109632 -428 146.1289295 -429 145.7040286 -430 145.2840842 -431 145.0408079 -432 144.7811929 -433 144.3322422 -434 144.2214218 -435 143.9688619 -436 143.6083057 -437 143.2156197 -438 142.8357081 -439 142.5387687 -440 142.2042503 -441 142.0426301 -442 141.8952894 -443 141.6464236 -444 141.5411481 -445 141.0823141 -446 140.4287564 -447 140.1803918 -448 139.9958853 -449 139.7426692 -450 139.1196939 -451 138.6960149 -452 138.4218897 -453 138.2403828 -454 138.0915511 -455 137.9493266 -456 137.7770819 -457 137.3442044 -458 137.1171304 -459 136.6867566 -460 136.518458 -461 136.0988944 -462 136.0065977 -463 135.9112619 -464 135.6242991 -465 135.5102452 -466 135.1636411 -467 135.0992177 -468 134.8365283 -469 134.7463366 -470 134.587287 -471 134.4480214 -472 133.9584806 -473 133.8098951 -474 133.4703971 -475 133.0607757 -476 132.8214312 -477 132.6378881 -478 132.3500025 -479 
132.029824 -480 131.802763 -481 131.4477917 -482 131.340861 -483 131.138128 -484 130.8254085 -485 130.6226731 -486 130.5353426 -487 130.2002559 -488 129.8650409 -489 129.7614838 -490 129.7035868 -491 129.55131 -492 128.9822814 -493 128.7051594 -494 128.6011681 -495 128.4308561 -496 128.307177 -497 128.0653125 -498 127.6727011 -499 127.4025463 -500 127.2504625 -501 126.7573321 -502 126.5927506 -503 126.3386114 -504 125.9602791 -505 125.7382376 -506 125.5238531 -507 125.4034315 -508 125.108111 -509 124.888637 -510 124.5410307 -511 124.4532414 -512 124.2402559 -513 124.0493354 -514 123.8903048 -515 123.2513784 -516 122.9060997 -517 122.2659462 -518 121.9153759 -519 121.768553 -520 121.5586427 -521 121.328656 -522 121.2414438 -523 120.9509013 -524 120.7287231 -525 120.5182445 -526 120.1552011 -527 119.8518006 -528 119.5758902 -529 119.4684522 -530 119.1905877 -531 119.120799 -532 118.8522754 -533 118.5679242 -534 118.2441926 -535 118.0571382 -536 117.670673 -537 117.4626467 -538 117.3972691 -539 117.0498585 -540 116.8864118 -541 116.7032135 -542 116.4747407 -543 116.2745499 -544 116.1634434 -545 115.8837329 -546 115.7847364 -547 115.4726518 -548 115.2380594 -549 114.9832854 -550 114.7923225 -551 114.7061856 -552 114.2484883 -553 113.8249474 -554 113.3587873 -555 113.2361388 -556 112.9708343 -557 112.9087039 -558 112.7109537 -559 112.423592 -560 112.1257099 -561 112.030136 -562 111.7072661 -563 111.4030351 -564 111.239361 -565 110.9951902 -566 110.7555032 -567 110.4762113 -568 110.3324024 -569 110.1500777 -570 109.8347114 -571 109.6719111 -572 109.3507972 -573 109.0393228 -574 108.9092308 -575 108.5653394 -576 108.4450533 -577 108.2495707 -578 108.1288216 -579 107.9123132 -580 107.6536187 -581 107.4904367 -582 107.1957928 -583 106.9065549 -584 106.7433792 -585 106.6253206 -586 106.4358702 -587 106.1281316 -588 105.8891673 -589 105.4787768 -590 105.2468209 -591 105.0982909 -592 104.8447079 -593 104.5779736 -594 104.4110502 -595 104.0919646 -596 103.8919657 -597 103.7196598 -598 103.4492347 -599 103.2740587 -600 103.1276456 -601 102.9634171 -602 102.8259149 -603 102.7001608 -604 102.3681297 -605 102.1013055 -606 102.0398696 -607 101.9173139 -608 101.679954 -609 101.4066016 -610 101.3379424 -611 100.9210462 -612 100.6453409 -613 100.4676302 -614 100.3387898 -615 100.2590426 -616 100.0247543 -617 99.99480267 -618 99.81066888 -619 99.68847078 -620 99.31388979 -621 99.14994819 -622 98.97268473 -623 98.8268423 -624 98.75382222 -625 98.5446163 -626 98.31367241 -627 97.98978586 -628 97.88782136 -629 97.66350146 -630 97.55228588 -631 97.22287279 -632 96.94093467 -633 96.90730378 -634 96.72645055 -635 96.51649102 -636 96.48646053 -637 96.28440839 -638 96.12574304 -639 96.10080732 -640 95.89441847 -641 95.66836282 -642 95.45630098 -643 95.38867496 -644 95.36114208 -645 95.32338394 -646 95.13865864 -647 95.04087212 -648 94.80557884 -649 94.73138699 -650 94.58108436 -651 94.36301263 -652 94.31236028 -653 94.27889282 -654 94.10901123 -655 93.96474618 -656 93.7421906 -657 93.60514066 -658 93.46612437 -659 93.33229342 -660 93.21079587 -661 93.10705876 -662 92.81810673 -663 92.62650619 -664 92.42888654 -665 92.09176732 -666 92.04854396 -667 91.88550859 -668 91.85350035 -669 91.77807671 -670 91.65435283 -671 91.56783533 -672 91.26580867 -673 91.16238764 -674 91.10065046 -675 90.85117159 -676 90.69540399 -677 90.57391813 -678 90.51437297 -679 90.33889177 -680 90.24276011 -681 90.10789257 -682 89.95880505 -683 89.87212108 -684 89.7088169 -685 89.65178757 -686 89.45790266 -687 89.28717191 -688 89.01579809 -689 
88.8669891 -690 88.6699944 -691 88.56760592 -692 88.44988882 -693 88.27053152 -694 88.2414795 -695 88.15186789 -696 88.05019566 -697 87.99223331 -698 87.73657542 -699 87.66230383 -700 87.55509554 -701 87.3960713 -702 87.35367605 -703 87.31612871 -704 87.28308304 -705 87.10333984 -706 86.95975665 -707 86.74618961 -708 86.59571901 -709 86.5503641 -710 86.37323419 -711 86.33957217 -712 86.14207856 -713 85.97356517 -714 85.82683155 -715 85.66753612 -716 85.59529512 -717 85.54704191 -718 85.44003169 -719 85.35282701 -720 85.11502108 -721 84.99295044 -722 84.84849866 -723 84.719048 -724 84.61907861 -725 84.50627558 -726 84.40820784 -727 84.26996548 -728 84.13394306 -729 84.07218013 -730 83.96978134 -731 83.87239832 -732 83.68730916 -733 83.59803147 -734 83.50674817 -735 83.40334104 -736 83.34292332 -737 83.2841552 -738 83.1251919 -739 82.95627663 -740 82.86885815 -741 82.71013967 -742 82.65586453 -743 82.49744791 -744 82.41674243 -745 82.29664345 -746 82.15066517 -747 82.0370734 -748 81.86493678 -749 81.6952753 -750 81.52076307 -751 81.47245048 -752 81.33248962 -753 81.17612462 -754 81.03819438 -755 80.96675016 -756 80.82905315 -757 80.70283152 -758 80.59212139 -759 80.47676188 -760 80.39467719 -761 80.29316384 -762 80.25419775 -763 80.18231027 -764 80.03530867 -765 79.882003 -766 79.77233594 -767 79.71948209 -768 79.68256007 -769 79.5728087 -770 79.48980365 -771 79.45344299 -772 79.42383769 -773 79.35304737 -774 79.31777929 -775 79.27584682 -776 79.22300776 -777 79.16336216 -778 79.11542806 -779 79.05297899 -780 79.0055555 -781 78.96247249 -782 78.93486725 -783 78.86074492 -784 78.7980841 -785 78.68675842 -786 78.56844849 -787 78.45522467 -788 78.42915368 -789 78.33271787 -790 78.28517094 -791 78.22550739 -792 78.08128623 -793 78.00227098 -794 77.84685715 -795 77.7664605 -796 77.67018672 -797 77.58910134 -798 77.44372873 -799 77.37497353 -800 77.34882375 -801 77.19068707 -802 77.08922719 -803 76.9729166 -804 76.90348413 -805 76.82666583 -806 76.68116626 -807 76.63374313 -808 76.54946199 -809 76.49248776 -810 76.30567098 -811 76.20513103 -812 76.14947506 -813 76.07795348 -814 76.03071793 -815 75.85114002 -816 75.72457319 -817 75.62457528 -818 75.57980812 -819 75.50199726 -820 75.42896303 -821 75.28107282 -822 75.24370116 -823 75.21163781 -824 75.11238047 -825 75.07821544 -826 74.99763688 -827 74.97260021 -828 74.82280395 -829 74.7142409 -830 74.68809134 -831 74.66735038 -832 74.62791607 -833 74.4614704 -834 74.42822033 -835 74.3786069 -836 74.24628149 -837 74.17289785 -838 74.04392916 -839 73.8944423 -840 73.75377577 -841 73.63426609 -842 73.55942885 -843 73.45438013 -844 73.2753604 -845 73.10596468 -846 73.05334097 -847 72.9262698 -848 72.79142922 -849 72.66419711 -850 72.46903723 -851 72.34320795 -852 72.23244127 -853 72.20557751 -854 72.10773374 -855 72.0656456 -856 71.9769982 -857 71.8705715 -858 71.79586444 -859 71.73351722 -860 71.71965344 -861 71.66132076 -862 71.55484646 -863 71.53760399 -864 71.50761043 -865 71.36594699 -866 71.19238423 -867 71.17217622 -868 71.07745294 -869 70.97565875 -870 70.89321102 -871 70.85875957 -872 70.75038623 -873 70.6590808 -874 70.58091606 -875 70.44697908 -876 70.36441182 -877 70.20024085 -878 70.1359221 -879 70.03250378 -880 69.93876123 -881 69.83200036 -882 69.62171708 -883 69.51234224 -884 69.36816508 -885 69.26007535 -886 69.17361388 -887 69.0210084 -888 68.88465565 -889 68.74682108 -890 68.70150075 -891 68.63792821 -892 68.57925914 -893 68.51565267 -894 68.42777149 -895 68.39382409 -896 68.30705352 -897 68.1673343 -898 68.13581243 -899 68.01723987 
-900 67.94841632 -901 67.83548288 -902 67.75339725 -903 67.61928604 -904 67.47890298 -905 67.40245754 -906 67.25332785 -907 67.18688177 -908 67.12462282 -909 67.09869793 -910 66.92775555 -911 66.79737891 -912 66.61888136 -913 66.52898648 -914 66.42846973 -915 66.37536759 -916 66.33675846 -917 66.18297625 -918 66.05337085 -919 65.98687591 -920 65.97236793 -921 65.95830318 -922 65.93802431 -923 65.88563313 -924 65.7892447 -925 65.67058669 -926 65.63703386 -927 65.5087068 -928 65.37954195 -929 65.3183101 -930 65.20737401 -931 65.17874923 -932 65.08987741 -933 64.99861268 -934 64.87984454 -935 64.80972424 -936 64.73814004 -937 64.65819097 -938 64.5587909 -939 64.44593228 -940 64.39097823 -941 64.27577767 -942 64.15712342 -943 64.06704534 -944 64.04056429 -945 63.9472476 -946 63.8379816 -947 63.72808541 -948 63.63637843 -949 63.57742148 -950 63.512264 -951 63.36761033 -952 63.30219425 -953 63.27483452 -954 63.23222472 -955 63.16959836 -956 63.05808935 -957 62.99698818 -958 62.89499542 -959 62.85488529 -960 62.75814145 -961 62.67031583 -962 62.59874788 -963 62.54413822 -964 62.45073587 -965 62.3819076 -966 62.34907098 -967 62.25972654 -968 62.20674847 -969 62.16446083 -970 62.08235998 -971 61.96334733 -972 61.88442894 -973 61.79490922 -974 61.64128691 -975 61.55177583 -976 61.51457543 -977 61.42560945 -978 61.40207171 -979 61.30660088 -980 61.268583 -981 61.22934796 -982 61.20616682 -983 61.08147013 -984 61.05608199 -985 61.0355597 -986 61.00908742 -987 60.92877525 -988 60.85971462 -989 60.74474777 -990 60.62859732 -991 60.53948158 -992 60.47245961 -993 60.3302556 -994 60.24396727 -995 60.1186116 -996 60.05543163 -997 60.02251974 -998 59.94975844 -999 59.92150919 +0 3606.387858 +1 3518.319482 +2 3419.905128 +3 3332.348887 +4 3241.127893 +5 3157.197799 +6 3084.268305 +7 2999.831771 +8 2922.61658 +9 2850.066213 +10 2787.171725 +11 2712.631573 +12 2639.240862 +13 2580.881397 +14 2521.58662 +15 2464.607771 +16 2401.172352 +17 2342.65902 +18 2289.300237 +19 2235.975037 +20 2178.182547 +21 2125.421774 +22 2077.423716 +23 2036.067952 +24 1991.236961 +25 1947.239582 +26 1898.356448 +27 1852.598884 +28 1805.98091 +29 1769.275215 +30 1732.261276 +31 1698.06551 +32 1662.095506 +33 1625.53386 +34 1589.035018 +35 1557.617707 +36 1527.889061 +37 1493.967086 +38 1465.168414 +39 1436.754665 +40 1409.060957 +41 1383.04592 +42 1352.152048 +43 1326.002392 +44 1298.795677 +45 1277.59462 +46 1253.978103 +47 1230.190699 +48 1203.967689 +49 1182.19517 +50 1164.258966 +51 1141.966488 +52 1120.815492 +53 1101.092758 +54 1085.964328 +55 1068.29696 +56 1050.669356 +57 1034.918083 +58 1015.911816 +59 996.1684752 +60 977.4736183 +61 961.5096755 +62 944.211636 +63 928.0566653 +64 915.6840108 +65 898.1121629 +66 882.9822656 +67 873.9766077 +68 859.858408 +69 843.5547062 +70 831.306257 +71 820.3099516 +72 807.1855686 +73 794.169211 +74 783.9636837 +75 776.4972272 +76 767.6959848 +77 755.1965137 +78 744.4263707 +79 737.761998 +80 729.7130391 +81 722.3074503 +82 716.8086447 +83 709.6708335 +84 699.0944538 +85 693.2289681 +86 685.4228057 +87 676.9562967 +88 670.2366575 +89 666.4112163 +90 659.6851751 +91 650.5406821 +92 642.437192 +93 637.5808176 +94 630.4765451 +95 626.0489731 +96 621.6103651 +97 614.0547972 +98 607.9423586 +99 600.4874848 +100 592.9412018 +101 588.1187919 +102 581.0641089 +103 574.2759513 +104 569.9720728 +105 566.190281 +106 562.1953603 +107 557.4002281 +108 553.1702731 +109 548.5889262 +110 543.4056498 +111 538.4832325 +112 535.2458573 +113 530.1384428 +114 525.1436712 +115 520.6876088 +116 514.513666 +117 
509.7961508 +118 506.6708641 +119 502.5185268 +120 499.116907 +121 494.894429 +122 490.7216202 +123 486.660548 +124 484.8822804 +125 480.6495083 +126 476.774821 +127 472.7333979 +128 468.6643234 +129 464.4447997 +130 460.6841863 +131 456.2935225 +132 453.1949553 +133 450.8314019 +134 447.9792497 +135 444.1524308 +136 440.3004227 +137 437.5089858 +138 434.4987658 +139 431.333326 +140 429.2145835 +141 426.2960991 +142 423.9875382 +143 422.3177707 +144 421.2376606 +145 417.7776491 +146 416.1531592 +147 412.1591115 +148 409.1819197 +149 406.5238175 +150 404.1942423 +151 400.2960048 +152 398.2802551 +153 395.9698537 +154 393.9176761 +155 390.6531762 +156 387.9927685 +157 386.1000681 +158 382.8113362 +159 379.6496615 +160 378.3378875 +161 376.0755551 +162 374.0972576 +163 371.8931312 +164 368.0549992 +165 366.6124142 +166 364.8857903 +167 362.2252152 +168 360.9085148 +169 358.6451278 +170 355.0984419 +171 354.0569022 +172 351.6524348 +173 349.2576632 +174 347.7399362 +175 346.3216673 +176 345.3573698 +177 344.3890702 +178 342.0730746 +179 340.2167814 +180 338.6962004 +181 337.5122711 +182 334.5157453 +183 333.3058789 +184 332.8028753 +185 331.0744585 +186 329.8548976 +187 327.869795 +188 326.5184806 +189 325.4357906 +190 323.3430059 +191 321.5208687 +192 319.5760156 +193 317.7506047 +194 316.1839605 +195 314.198558 +196 312.3917238 +197 311.3355559 +198 310.3982421 +199 308.6559812 +200 307.3273653 +201 305.8939696 +202 303.8927472 +203 302.7532426 +204 301.1496647 +205 300.1434351 +206 299.3006517 +207 298.4622687 +208 297.1605991 +209 295.8648262 +210 293.5810869 +211 292.8127275 +212 292.0236736 +213 290.2514839 +214 288.2243426 +215 286.8670898 +216 285.5220573 +217 283.7910197 +218 282.2997544 +219 280.8603639 +220 279.7272082 +221 278.1697266 +222 276.9547641 +223 275.9620385 +224 274.6320485 +225 273.6185828 +226 272.4580245 +227 271.4426343 +228 270.7657401 +229 269.4458688 +230 268.5788803 +231 267.0961414 +232 266.0051701 +233 264.5156592 +234 263.1693637 +235 262.2827066 +236 261.9286628 +237 261.482238 +238 260.3457298 +239 259.7332775 +240 259.3165229 +241 258.7312612 +242 257.2398668 +243 256.7958772 +244 256.0899879 +245 255.4514517 +246 253.7741395 +247 253.4455383 +248 252.1779053 +249 251.5443517 +250 250.6149484 +251 250.0460461 +252 249.2557283 +253 248.5946517 +254 247.3165644 +255 246.0656813 +256 245.4258742 +257 244.8982327 +258 244.051275 +259 243.1550562 +260 242.3001204 +261 241.1597182 +262 240.6731094 +263 240.2473054 +264 239.1251422 +265 237.9541634 +266 236.9556174 +267 235.5577891 +268 234.9502464 +269 234.0494811 +270 233.1372456 +271 232.8750573 +272 232.2241787 +273 231.5673968 +274 230.7525811 +275 230.2074729 +276 229.9528668 +277 229.7334938 +278 228.6220546 +279 228.4356848 +280 227.5121516 +281 226.5496263 +282 225.5877827 +283 224.7683111 +284 224.0297309 +285 223.3259847 +286 222.6383996 +287 221.9632532 +288 221.2404351 +289 220.5276791 +290 219.5688789 +291 218.5386855 +292 217.8530368 +293 216.6832815 +294 216.2709057 +295 215.438626 +296 214.5969252 +297 214.4329592 +298 213.7404229 +299 213.2976718 +300 213.0368616 +301 212.8299923 +302 212.2011472 +303 211.9407753 +304 210.7295623 +305 210.1997047 +306 209.6083914 +307 209.0600739 +308 208.8628804 +309 207.9809195 +310 207.7482659 +311 207.6323486 +312 206.190741 +313 205.6996807 +314 205.4047318 +315 205.2218503 +316 204.241677 +317 203.9209904 +318 203.7444036 +319 202.7247697 +320 202.1082808 +321 201.9474572 +322 201.7857796 +323 201.4609255 +324 201.2312686 +325 200.5681497 +326 200.4091687 
+327 199.5892878 +328 199.4371002 +329 198.3931701 +330 197.9225317 +331 197.3986823 +332 197.0484135 +333 195.7574594 +334 195.6491527 +335 195.4817842 +336 194.7880145 +337 194.4113957 +338 194.0318356 +339 193.5284278 +340 192.5991816 +341 192.1031854 +342 191.5491337 +343 191.1828464 +344 189.8709412 +345 189.2616361 +346 188.4713385 +347 188.3002161 +348 187.7644675 +349 187.3059385 +350 187.1764166 +351 186.7702198 +352 186.2233832 +353 186.1274261 +354 185.636063 +355 185.1812839 +356 185.068081 +357 183.8599331 +358 183.6914997 +359 183.3436455 +360 182.9215853 +361 182.3247868 +362 181.5896693 +363 180.8062462 +364 180.1039249 +365 179.9705744 +366 179.8880634 +367 179.5510833 +368 178.6255684 +369 178.0825683 +370 177.5440763 +371 177.0171958 +372 176.7896567 +373 176.149964 +374 175.3705414 +375 175.0059888 +376 174.5529551 +377 174.4630831 +378 174.2530752 +379 174.1312762 +380 173.0735198 +381 172.9912893 +382 172.640672 +383 172.5681628 +384 172.4643172 +385 171.7777603 +386 171.6906621 +387 170.6872263 +388 170.514087 +389 170.3233994 +390 169.8846439 +391 169.529393 +392 168.9009519 +393 168.2725977 +394 168.1686093 +395 168.0934607 +396 167.508123 +397 167.4163222 +398 166.9013374 +399 166.6137216 +400 165.7003187 +401 165.5639131 +402 164.9306567 +403 164.8399144 +404 164.0876912 +405 163.5523537 +406 163.2875856 +407 162.998686 +408 162.5089806 +409 162.2376064 +410 161.6910172 +411 161.5034709 +412 161.3250696 +413 160.6313335 +414 160.3742082 +415 159.7341574 +416 159.664954 +417 159.5420211 +418 159.4801209 +419 159.3536466 +420 159.2770295 +421 158.561971 +422 157.9686295 +423 157.8587884 +424 157.3191629 +425 156.8336449 +426 156.4918462 +427 156.145352 +428 156.0697038 +429 155.9134811 +430 155.678063 +431 155.4965466 +432 155.0397607 +433 154.6821414 +434 154.6232933 +435 154.1938736 +436 153.8281113 +437 153.3348882 +438 153.2388265 +439 153.1804252 +440 153.0976746 +441 152.7513308 +442 152.3535668 +443 152.110953 +444 151.8156996 +445 151.2831641 +446 150.8180434 +447 150.7537961 +448 150.2536504 +449 149.8562556 +450 149.4497248 +451 149.2791359 +452 148.8058612 +453 148.419447 +454 148.0003908 +455 147.1557482 +456 146.7813221 +457 146.63709 +458 145.8417458 +459 145.7921029 +460 145.69582 +461 145.6319971 +462 145.5926065 +463 145.2514255 +464 145.0422767 +465 144.6941249 +466 144.1409204 +467 144.0384521 +468 143.9539897 +469 143.7129095 +470 143.6364716 +471 143.5557396 +472 142.9670765 +473 142.5645332 +474 141.787664 +475 141.3298725 +476 141.2575251 +477 141.1857064 +478 140.7815123 +479 140.6905512 +480 140.6240735 +481 140.1605819 +482 139.9923119 +483 139.5946379 +484 139.5477815 +485 139.0551136 +486 139.0083275 +487 138.8115081 +488 138.3582187 +489 137.9994483 +490 137.2762793 +491 137.2319494 +492 136.8284401 +493 136.0778994 +494 135.7573861 +495 135.2420576 +496 134.5639228 +497 134.1998245 +498 133.9053919 +499 133.6473876 +500 133.2527423 +501 133.1154401 +502 132.7053765 +503 132.6254567 +504 132.42907 +505 132.0886631 +506 132.0251257 +507 131.8522651 +508 131.7043809 +509 131.6551685 +510 131.3064159 +511 131.2539164 +512 130.802881 +513 130.7267198 +514 130.6751541 +515 130.3496136 +516 129.8251839 +517 129.7822498 +518 129.5051843 +519 129.3134521 +520 128.9161601 +521 128.4843352 +522 128.3649809 +523 127.7238357 +524 127.6690506 +525 127.3190322 +526 126.7021578 +527 126.32958 +528 125.9108245 +529 125.332027 +530 125.1915011 +531 124.5991005 +532 124.277708 +533 124.1804067 +534 124.0032042 +535 123.91302 +536 123.6086038 +537 
123.0597351 +538 122.7188736 +539 122.5133584 +540 122.1510768 +541 122.1148408 +542 121.7833691 +543 121.7468344 +544 121.7049673 +545 121.4597814 +546 121.2510597 +547 121.1959397 +548 121.0473791 +549 120.7324814 +550 120.4692276 +551 120.3943509 +552 120.3597888 +553 120.0759583 +554 119.9362893 +555 119.6710969 +556 119.3271803 +557 119.0593249 +558 118.9329291 +559 118.418452 +560 118.1574761 +561 117.7196836 +562 117.3428279 +563 117.186318 +564 117.1112686 +565 116.8819815 +566 116.6706405 +567 116.2121918 +568 116.0962801 +569 116.052204 +570 115.8082428 +571 115.7286278 +572 115.5696183 +573 115.3878373 +574 115.1568175 +575 114.9132003 +576 114.864146 +577 114.7956804 +578 114.4332725 +579 114.2023529 +580 114.1078602 +581 113.7429863 +582 113.4915904 +583 113.3195407 +584 112.84174 +585 112.4972889 +586 112.4118409 +587 112.3541925 +588 112.1993478 +589 112.0858921 +590 112.0542527 +591 112.0106512 +592 111.7676748 +593 111.6282142 +594 111.5652245 +595 111.3232837 +596 111.1573668 +597 110.9416805 +598 110.8623598 +599 110.5618879 +600 110.4440189 +601 110.3327871 +602 110.1214915 +603 109.9496065 +604 109.6583282 +605 109.6044222 +606 109.4820211 +607 109.2698381 +608 109.1570669 +609 109.1252474 +610 108.9188958 +611 108.7261926 +612 108.5619027 +613 108.5113332 +614 108.4806216 +615 108.3221197 +616 108.0966532 +617 107.8732155 +618 107.8368224 +619 107.7420721 +620 107.5455535 +621 107.3268846 +622 107.2784579 +623 107.2004171 +624 106.9064835 +625 106.4321729 +626 106.3370654 +627 106.0784342 +628 106.0356971 +629 105.793307 +630 105.772673 +631 105.5621032 +632 105.5327179 +633 105.4297935 +634 105.349791 +635 105.3207121 +636 105.0587917 +637 104.9291032 +638 104.7230249 +639 104.5480841 +640 104.3438205 +641 104.3146935 +642 104.0057678 +643 103.9290039 +644 103.6376607 +645 103.3721232 +646 103.1844589 +647 103.0468138 +648 102.9096825 +649 102.8911943 +650 102.7599087 +651 102.5024037 +652 102.1011329 +653 101.8532865 +654 101.7601504 +655 101.6918072 +656 101.5773558 +657 101.4480806 +658 101.3912872 +659 101.1216664 +660 100.9894583 +661 100.7710811 +662 100.3719608 +663 100.1535745 +664 99.90273285 +665 99.75122622 +666 99.59773921 +667 99.45590455 +668 99.35142904 +669 98.97802968 +670 98.63061279 +671 98.33636214 +672 98.09710463 +673 97.83469051 +674 97.65662438 +675 97.41941931 +676 97.16789393 +677 96.79614151 +678 96.63508251 +679 96.48027557 +680 96.23638366 +681 96.08591125 +682 95.96102642 +683 95.73700045 +684 95.59880493 +685 95.57022852 +686 95.54346281 +687 95.30037527 +688 95.14140291 +689 95.00647918 +690 94.90320875 +691 94.67619173 +692 94.44599217 +693 94.2643355 +694 94.09580964 +695 93.88412934 +696 93.71874897 +697 93.5744453 +698 93.52003371 +699 93.28456464 +700 93.123137 +701 92.85437766 +702 92.82821716 +703 92.80801325 +704 92.67006319 +705 92.45803962 +706 92.26768076 +707 92.04517711 +708 92.01934367 +709 91.69217515 +710 91.41262654 +711 91.22844365 +712 91.0029696 +713 90.91927956 +714 90.78425174 +715 90.65674763 +716 90.53865432 +717 90.39671287 +718 90.24281762 +719 90.10843845 +720 90.07088285 +721 89.75778642 +722 89.44493911 +723 89.33218415 +724 89.22294498 +725 88.99462025 +726 88.87335106 +727 88.72574375 +728 88.60158818 +729 88.44917471 +730 88.39997405 +731 88.28397782 +732 88.25960648 +733 88.04324091 +734 88.02202762 +735 87.71503579 +736 87.52364778 +737 87.28070694 +738 87.14627098 +739 87.00704387 +740 86.92646242 +741 86.82729647 +742 86.78045019 +743 86.59043507 +744 86.36661044 +745 86.16361791 +746 85.99162242 
+747 85.97975009 +748 85.72576134 +749 85.55863112 +750 85.53790883 +751 85.48072686 +752 85.33480816 +753 85.21920337 +754 85.1201592 +755 84.98406257 +756 84.84864168 +757 84.83176765 +758 84.63682584 +759 84.45718166 +760 84.2623255 +761 84.06250083 +762 83.98742891 +763 83.9654573 +764 83.80895885 +765 83.68567772 +766 83.50718495 +767 83.46609007 +768 83.45135397 +769 83.2363417 +770 83.17236547 +771 82.91317357 +772 82.79827798 +773 82.70452039 +774 82.54522036 +775 82.36753462 +776 82.14133457 +777 81.94908304 +778 81.78425609 +779 81.74852642 +780 81.55502016 +781 81.3451698 +782 81.31970895 +783 81.27369229 +784 81.21918123 +785 80.9907386 +786 80.83250622 +787 80.70867401 +788 80.5228913 +789 80.41817098 +790 80.38770927 +791 80.20767199 +792 80.09246156 +793 79.91825023 +794 79.88651678 +795 79.7107924 +796 79.59117874 +797 79.42415425 +798 79.38917271 +799 79.31480069 +800 79.25863184 +801 79.19776292 +802 79.07016322 +803 78.95728068 +804 78.882104 +805 78.78759484 +806 78.65679994 +807 78.470072 +808 78.33314013 +809 78.19015712 +810 78.13812619 +811 78.01360261 +812 77.84131133 +813 77.7753725 +814 77.58358531 +815 77.35581633 +816 77.23815501 +817 77.19202898 +818 77.08471226 +819 76.96045327 +820 76.75096178 +821 76.73002244 +822 76.46183148 +823 76.31784696 +824 76.25853616 +825 76.15881617 +826 76.14653618 +827 76.05114431 +828 75.93230304 +829 75.76477165 +830 75.64025906 +831 75.42114584 +832 75.35767403 +833 75.25180038 +834 75.23953429 +835 75.20358883 +836 75.12525159 +837 74.94534918 +838 74.77815877 +839 74.74762084 +840 74.73919979 +841 74.57390243 +842 74.43510998 +843 74.36709609 +844 74.23879576 +845 74.15068154 +846 74.11282123 +847 73.97895275 +848 73.86385892 +849 73.80631361 +850 73.6612263 +851 73.57407119 +852 73.49557136 +853 73.30607327 +854 73.14332929 +855 73.00226481 +856 72.92531658 +857 72.80699591 +858 72.66490284 +859 72.54722229 +860 72.41389546 +861 72.31275977 +862 72.12873668 +863 72.0770661 +864 72.01171287 +865 71.99478489 +866 71.87438088 +867 71.7308027 +868 71.69869645 +869 71.68217077 +870 71.61534523 +871 71.55238737 +872 71.42625749 +873 71.33376108 +874 71.13850167 +875 71.03971411 +876 70.87204535 +877 70.80240958 +878 70.68111697 +879 70.61825473 +880 70.57259726 +881 70.38983888 +882 70.38136988 +883 70.296864 +884 70.22374833 +885 70.01301989 +886 69.9547265 +887 69.80368793 +888 69.66682294 +889 69.58674446 +890 69.53078637 +891 69.47245364 +892 69.2892044 +893 69.21812646 +894 69.10278435 +895 68.97944608 +896 68.90118676 +897 68.78754012 +898 68.7781134 +899 68.63161424 +900 68.55341997 +901 68.44375851 +902 68.26619185 +903 68.17387821 +904 68.1067364 +905 68.0037676 +906 67.84595199 +907 67.72164019 +908 67.59821903 +909 67.55280532 +910 67.45436784 +911 67.31968997 +912 67.13815956 +913 67.03332528 +914 66.93924111 +915 66.91058087 +916 66.89843724 +917 66.84815053 +918 66.83475141 +919 66.71827241 +920 66.67554343 +921 66.50290341 +922 66.41092302 +923 66.37060972 +924 66.27378639 +925 66.22297863 +926 66.21378748 +927 66.18750052 +928 66.1067673 +929 66.06653338 +930 66.05436861 +931 65.99407717 +932 65.93399844 +933 65.8235334 +934 65.73604072 +935 65.61843396 +936 65.59111518 +937 65.49802554 +938 65.44474624 +939 65.43646462 +940 65.42790969 +941 65.4183837 +942 65.31026144 +943 65.24153336 +944 65.06807618 +945 64.93037422 +946 64.7980956 +947 64.62992858 +948 64.62299463 +949 64.5323349 +950 64.34052541 +951 64.2069101 +952 64.15975141 +953 64.03350651 +954 63.90578693 +955 63.87886555 +956 63.83229954 +957 
63.7567763 +958 63.68328044 +959 63.55163865 +960 63.48732217 +961 63.42671472 +962 63.3626304 +963 63.33717356 +964 63.3206294 +965 63.30827576 +966 63.23982978 +967 63.17613845 +968 63.07928415 +969 62.95135955 +970 62.87113745 +971 62.82066775 +972 62.74025346 +973 62.62144085 +974 62.48589189 +975 62.40194554 +976 62.33991243 +977 62.27653795 +978 62.17473657 +979 62.1322206 +980 62.02976272 +981 61.9294509 +982 61.73615413 +983 61.67722996 +984 61.65371826 +985 61.51917595 +986 61.44883623 +987 61.40103852 +988 61.39355022 +989 61.28668955 +990 61.24210144 +991 61.15201253 +992 61.13945445 +993 61.10391857 +994 61.06796975 +995 61.035241 +996 60.98057681 +997 60.92399428 +998 60.843292 +999 60.74726042 diff --git a/catboost_info/time_left.tsv b/catboost_info/time_left.tsv index 838991c..c0525c2 100644 --- a/catboost_info/time_left.tsv +++ b/catboost_info/time_left.tsv @@ -1,1001 +1,1001 @@ iter Passed Remaining -0 1 1047 -1 2 1015 -2 2 965 -3 3 961 -4 4 923 -5 5 884 -6 6 858 -7 6 839 -8 7 828 -9 8 820 -10 9 818 -11 9 810 -12 10 799 -13 11 794 -14 11 786 -15 12 779 -16 13 775 -17 14 770 -18 14 765 -19 15 759 -20 16 766 -21 17 769 -22 18 771 -23 18 770 -24 19 769 -25 20 768 -26 21 765 -27 21 762 -28 22 759 -29 23 757 -30 24 753 -31 24 751 -32 25 749 -33 26 748 -34 27 744 -35 27 742 -36 29 760 -37 29 757 -38 30 755 -39 31 757 -40 32 757 -41 33 758 -42 34 756 -43 34 757 -44 35 753 -45 36 753 -46 37 751 -47 37 749 -48 38 747 -49 39 745 -50 39 743 -51 40 741 -52 41 738 -53 42 737 -54 42 735 -55 43 733 -56 44 732 -57 45 731 -58 45 729 -59 46 727 -60 47 729 -61 48 731 -62 49 731 -63 50 732 -64 50 731 -65 51 730 -66 52 728 -67 53 727 -68 53 726 -69 54 724 -70 55 723 -71 56 721 -72 56 720 -73 57 720 -74 58 719 -75 59 717 -76 59 716 -77 60 714 -78 61 713 -79 61 712 -80 62 712 -81 63 711 -82 64 714 -83 65 714 -84 66 714 -85 67 713 -86 67 712 -87 68 710 -88 69 709 -89 70 707 -90 70 706 -91 71 705 -92 72 703 -93 72 702 -94 73 700 -95 74 699 -96 74 697 -97 75 696 -98 76 695 -99 77 694 -100 77 693 -101 80 705 -102 80 705 -103 81 704 -104 82 703 -105 83 702 -106 84 702 -107 84 701 -108 85 699 -109 86 698 -110 87 697 -111 87 695 -112 88 694 -113 89 692 -114 89 692 -115 90 691 -116 91 690 -117 92 689 -118 92 687 -119 93 687 -120 94 687 -121 95 687 -122 96 687 -123 97 686 -124 97 685 -125 98 684 -126 99 682 -127 99 681 -128 101 684 -129 102 683 -130 102 681 -131 103 680 -132 104 679 -133 105 678 -134 105 677 -135 106 676 -136 107 674 -137 107 673 -138 108 673 -139 109 673 -140 110 672 -141 111 672 -142 112 671 -143 113 671 -144 113 671 -145 115 673 -146 115 672 -147 116 672 -148 117 671 -149 118 670 -150 119 670 -151 119 668 -152 120 667 -153 121 666 -154 122 665 -155 122 664 -156 123 663 -157 124 662 -158 125 661 -159 125 661 -160 126 660 -161 127 659 -162 128 658 -163 128 657 -164 129 656 -165 130 655 -166 131 654 -167 131 653 -168 132 652 -169 133 650 -170 133 649 -171 134 648 -172 135 646 -173 136 645 -174 136 644 -175 137 643 -176 138 642 -177 138 641 -178 139 641 -179 140 642 -180 141 641 -181 142 641 -182 143 640 -183 144 639 -184 144 638 -185 145 637 -186 146 636 -187 146 634 -188 147 633 -189 148 632 -190 149 634 -191 150 633 -192 151 632 -193 151 631 -194 152 629 -195 153 628 -196 154 627 -197 154 627 -198 155 626 -199 156 624 -200 157 624 -201 158 625 -202 159 624 -203 159 623 -204 160 622 -205 161 621 -206 161 620 -207 162 619 -208 163 618 -209 164 617 -210 164 616 -211 165 615 -212 166 614 -213 166 613 -214 167 612 -215 168 611 -216 169 610 -217 169 608 -218 170 608 -219 171 607 -220 172 606 
-221 173 606 -222 173 606 -223 174 605 -224 175 604 -225 176 603 -226 176 602 -227 177 601 -228 178 601 -229 179 600 -230 180 600 -231 181 599 -232 181 598 -233 182 598 -234 183 597 -235 184 596 -236 185 596 -237 186 595 -238 187 595 -239 187 595 -240 188 594 -241 189 593 -242 190 593 -243 191 592 -244 191 591 -245 192 591 -246 193 590 -247 194 589 -248 195 589 -249 196 588 -250 197 587 -251 197 587 -252 198 586 -253 199 585 -254 200 584 -255 201 585 -256 202 585 -257 203 584 -258 204 584 -259 205 583 -260 205 582 -261 206 582 -262 207 581 -263 208 580 -264 208 579 -265 209 578 -266 210 577 -267 211 577 -268 212 576 -269 212 575 -270 213 575 -271 214 574 -272 215 573 -273 216 572 -274 217 572 -275 218 572 -276 219 573 -277 220 572 -278 222 573 -279 222 573 -280 223 572 -281 224 571 -282 225 570 -283 225 569 -284 226 568 -285 227 566 -286 227 565 -287 228 564 -288 229 563 -289 229 562 -290 230 562 -291 231 561 -292 234 565 -293 234 564 -294 235 563 -295 236 562 -296 237 561 -297 237 560 -298 238 559 -299 239 558 -300 239 557 -301 240 556 -302 241 555 -303 241 554 -304 242 552 -305 243 552 -306 244 551 -307 244 550 -308 245 549 -309 246 548 -310 247 547 -311 248 547 -312 248 546 -313 249 545 -314 250 544 -315 251 543 -316 251 542 -317 252 541 -318 253 540 -319 253 539 -320 254 538 -321 255 537 -322 255 536 -323 256 535 -324 257 533 -325 257 532 -326 258 531 -327 259 530 -328 259 529 -329 260 528 -330 261 528 -331 262 527 -332 262 526 -333 264 528 -334 265 527 -335 266 526 -336 267 525 -337 268 525 -338 268 524 -339 269 523 -340 270 522 -341 270 521 -342 271 520 -343 272 519 -344 273 518 -345 273 517 -346 274 516 -347 275 515 -348 275 514 -349 276 513 -350 277 512 -351 278 512 -352 279 511 -353 279 510 -354 280 510 -355 281 509 -356 282 508 -357 283 507 -358 283 506 -359 284 506 -360 285 505 -361 286 504 -362 286 503 -363 287 502 -364 288 501 -365 288 500 -366 289 499 -367 290 498 -368 290 497 -369 291 496 -370 292 495 -371 292 494 -372 293 493 -373 296 496 -374 297 495 -375 297 494 -376 298 494 -377 299 493 -378 300 492 -379 301 491 -380 301 490 -381 302 489 -382 303 488 -383 304 487 -384 304 486 -385 305 485 -386 306 484 -387 306 483 -388 307 483 -389 308 482 -390 309 481 -391 310 481 -392 311 480 -393 312 480 -394 312 479 -395 313 478 -396 314 477 -397 315 476 -398 315 475 -399 316 474 -400 317 473 -401 317 473 -402 318 472 -403 319 471 -404 320 470 -405 320 469 -406 321 468 -407 322 467 -408 322 466 -409 323 465 -410 324 464 -411 325 464 -412 326 463 -413 327 462 -414 327 462 -415 328 461 -416 329 460 -417 330 459 -418 330 458 -419 331 457 -420 332 456 -421 332 455 -422 333 454 -423 334 454 -424 334 453 -425 335 452 -426 336 451 -427 337 450 -428 337 449 -429 338 448 -430 339 447 -431 339 446 -432 340 446 -433 341 445 -434 342 444 -435 343 444 -436 344 443 -437 344 442 -438 345 441 -439 346 440 -440 346 439 -441 347 438 -442 348 437 -443 348 436 -444 349 436 -445 350 435 -446 351 434 -447 351 433 -448 352 432 -449 353 431 -450 353 430 -451 354 429 -452 355 429 -453 356 428 -454 357 427 -455 357 426 -456 358 426 -457 359 425 -458 360 424 -459 361 423 -460 361 422 -461 362 422 -462 363 421 -463 364 420 -464 365 420 -465 366 419 -466 367 419 -467 368 418 -468 368 417 -469 369 416 -470 370 415 -471 371 415 -472 371 414 -473 372 413 -474 373 412 -475 374 412 -476 375 411 -477 376 410 -478 376 410 -479 377 409 -480 378 408 -481 379 407 -482 379 406 -483 380 405 -484 381 404 -485 381 403 -486 382 402 -487 383 402 -488 383 401 -489 384 400 -490 385 399 -491 386 398 -492 387 398 -493 388 397 -494 
389 396 -495 389 396 -496 390 395 -497 391 394 -498 392 393 -499 392 392 -500 393 391 -501 394 391 -502 394 390 -503 395 389 -504 396 388 -505 396 387 -506 397 386 -507 398 385 -508 399 384 -509 399 384 -510 400 383 -511 401 382 -512 402 381 -513 403 381 -514 403 380 -515 405 380 -516 405 379 -517 406 378 -518 407 377 -519 408 376 -520 409 376 -521 410 376 -522 411 375 -523 412 374 -524 413 373 -525 413 373 -526 414 372 -527 415 371 -528 416 370 -529 417 369 -530 420 371 -531 420 370 -532 421 369 -533 422 368 -534 423 367 -535 424 367 -536 424 366 -537 425 365 -538 426 364 -539 427 363 -540 427 363 -541 428 362 -542 429 361 -543 430 360 -544 431 359 -545 431 359 -546 432 358 -547 435 358 -548 435 358 -549 436 357 -550 437 356 -551 438 355 -552 439 354 -553 439 354 -554 440 353 -555 441 352 -556 442 351 -557 443 350 -558 443 350 -559 444 349 -560 445 348 -561 446 347 -562 446 346 -563 447 346 -564 448 345 -565 449 344 -566 450 343 -567 451 343 -568 451 342 -569 453 341 -570 453 341 -571 454 340 -572 455 339 -573 456 338 -574 456 337 -575 457 336 -576 458 335 -577 458 335 -578 459 334 -579 460 333 -580 461 332 -581 461 331 -582 462 330 -583 463 329 -584 464 329 -585 465 328 -586 466 327 -587 467 327 -588 467 326 -589 468 325 -590 469 324 -591 470 324 -592 470 323 -593 471 322 -594 472 321 -595 472 320 -596 473 319 -597 474 318 -598 475 318 -599 475 317 -600 476 316 -601 477 315 -602 477 314 -603 478 313 -604 479 312 -605 479 312 -606 481 311 -607 482 310 -608 482 310 -609 483 309 -610 484 308 -611 485 307 -612 485 306 -613 486 305 -614 487 305 -615 488 304 -616 488 303 -617 489 302 -618 490 301 -619 490 300 -620 491 300 -621 492 299 -622 492 298 -623 493 297 -624 494 296 -625 494 295 -626 496 295 -627 496 294 -628 497 293 -629 498 292 -630 499 292 -631 500 291 -632 500 290 -633 501 289 -634 502 288 -635 502 287 -636 503 287 -637 504 286 -638 505 285 -639 505 284 -640 506 283 -641 507 282 -642 507 281 -643 508 281 -644 509 280 -645 509 279 -646 510 278 -647 511 277 -648 512 276 -649 512 276 -650 513 275 -651 514 274 -652 515 273 -653 515 272 -654 516 272 -655 517 271 -656 518 270 -657 518 269 -658 519 268 -659 520 268 -660 521 267 -661 521 266 -662 522 265 -663 523 264 -664 524 263 -665 524 263 -666 525 262 -667 526 261 -668 527 260 -669 528 260 -670 528 259 -671 529 258 -672 530 257 -673 531 256 -674 531 256 -675 532 255 -676 533 254 -677 534 253 -678 534 252 -679 535 251 -680 536 251 -681 536 250 -682 537 249 -683 538 248 -684 538 247 -685 539 246 -686 540 246 -687 540 245 -688 541 244 -689 542 243 -690 543 243 -691 544 242 -692 544 241 -693 545 240 -694 546 239 -695 547 238 -696 547 238 -697 548 237 -698 549 236 -699 549 235 -700 550 234 -701 551 234 -702 552 233 -703 552 232 -704 553 231 -705 554 230 -706 554 229 -707 555 229 -708 556 228 -709 557 227 -710 558 227 -711 559 226 -712 560 225 -713 562 225 -714 563 224 -715 563 223 -716 564 222 -717 565 222 -718 565 221 -719 566 220 -720 567 219 -721 568 218 -722 568 217 -723 569 217 -724 570 216 -725 571 215 -726 571 214 -727 572 213 -728 573 213 -729 574 212 -730 575 211 -731 576 211 -732 577 210 -733 577 209 -734 578 208 -735 579 207 -736 579 206 -737 580 206 -738 581 205 -739 582 204 -740 582 203 -741 583 202 -742 584 202 -743 584 201 -744 585 200 -745 586 199 -746 587 198 -747 587 198 -748 590 197 -749 591 197 -750 592 196 -751 592 195 -752 593 194 -753 594 193 -754 595 193 -755 595 192 -756 596 191 -757 597 190 -758 597 189 -759 598 189 -760 599 188 -761 599 187 -762 600 186 -763 601 185 -764 602 184 -765 602 184 -766 603 183 -767 604 
182 -768 605 181 -769 605 180 -770 606 180 -771 607 179 -772 608 178 -773 608 177 -774 609 176 -775 610 176 -776 610 175 -777 611 174 -778 612 173 -779 612 172 -780 613 172 -781 614 171 -782 615 170 -783 615 169 -784 616 168 -785 617 168 -786 617 167 -787 618 166 -788 619 165 -789 620 164 -790 621 164 -791 622 163 -792 622 162 -793 623 161 -794 624 160 -795 625 160 -796 625 159 -797 626 158 -798 627 157 -799 627 156 -800 628 156 -801 629 155 -802 629 154 -803 630 153 -804 631 152 -805 632 152 -806 632 151 -807 633 150 -808 635 150 -809 636 149 -810 636 148 -811 638 147 -812 638 146 -813 639 146 -814 640 145 -815 640 144 -816 644 144 -817 645 143 -818 646 142 -819 646 141 -820 647 141 -821 648 140 -822 649 139 -823 649 138 -824 651 138 -825 652 137 -826 652 136 -827 653 135 -828 655 135 -829 656 134 -830 657 133 -831 657 132 -832 658 132 -833 659 131 -834 660 130 -835 661 129 -836 661 128 -837 662 128 -838 663 127 -839 664 126 -840 665 125 -841 665 124 -842 667 124 -843 668 123 -844 669 122 -845 669 121 -846 670 121 -847 671 120 -848 672 119 -849 673 118 -850 674 118 -851 674 117 -852 675 116 -853 676 115 -854 677 114 -855 678 114 -856 679 113 -857 679 112 -858 680 111 -859 681 110 -860 684 110 -861 685 109 -862 686 109 -863 687 108 -864 688 107 -865 688 106 -866 689 105 -867 690 104 -868 691 104 -869 691 103 -870 692 102 -871 693 101 -872 693 100 -873 694 100 -874 695 99 -875 695 98 -876 696 97 -877 697 96 -878 698 96 -879 699 95 -880 700 94 -881 701 93 -882 701 93 -883 702 92 -884 703 91 -885 704 90 -886 704 89 -887 705 88 -888 706 88 -889 706 87 -890 707 86 -891 708 85 -892 709 84 -893 709 84 -894 710 83 -895 711 82 -896 712 81 -897 712 80 -898 713 80 -899 714 79 -900 715 78 -901 716 77 -902 716 77 -903 717 76 -904 718 75 -905 719 74 -906 719 73 -907 720 72 -908 721 72 -909 721 71 -910 722 70 -911 723 69 -912 723 68 -913 724 68 -914 725 67 -915 725 66 -916 726 65 -917 727 64 -918 728 64 -919 729 63 -920 730 62 -921 731 61 -922 731 61 -923 732 60 -924 733 59 -925 733 58 -926 734 57 -927 735 57 -928 736 56 -929 736 55 -930 737 54 -931 738 53 -932 738 53 -933 739 52 -934 740 51 -935 740 50 -936 741 49 -937 742 49 -938 743 48 -939 744 47 -940 745 46 -941 746 45 -942 747 45 -943 747 44 -944 748 43 -945 749 42 -946 749 41 -947 750 41 -948 751 40 -949 751 39 -950 752 38 -951 753 37 -952 753 37 -953 754 36 -954 755 35 -955 756 34 -956 757 34 -957 757 33 -958 759 32 -959 759 31 -960 760 30 -961 761 30 -962 762 29 -963 763 28 -964 763 27 -965 764 26 -966 765 26 -967 765 25 -968 766 24 -969 767 23 -970 767 22 -971 768 22 -972 769 21 -973 769 20 -974 770 19 -975 771 18 -976 772 18 -977 772 17 -978 773 16 -979 774 15 -980 775 15 -981 776 14 -982 777 13 -983 777 12 -984 778 11 -985 779 11 -986 780 10 -987 780 9 -988 781 8 -989 782 7 -990 782 7 -991 783 6 -992 784 5 -993 785 4 -994 785 3 -995 786 3 -996 787 2 -997 787 1 -998 788 0 -999 789 0 +0 1 1021 +1 1 997 +2 2 993 +3 3 967 +4 4 953 +5 5 911 +6 6 905 +7 7 896 +8 7 877 +9 9 948 +10 12 1135 +11 13 1110 +12 14 1090 +13 15 1066 +14 15 1046 +15 16 1033 +16 17 1017 +17 18 1034 +18 19 1024 +19 20 1010 +20 21 1001 +21 22 995 +22 23 987 +23 24 1013 +24 25 1008 +25 27 1048 +26 29 1051 +27 30 1045 +28 30 1035 +29 32 1039 +30 33 1034 +31 34 1036 +32 35 1026 +33 37 1069 +34 38 1065 +35 39 1053 +36 40 1042 +37 41 1043 +38 42 1053 +39 45 1081 +40 46 1080 +41 47 1074 +42 48 1071 +43 49 1067 +44 50 1065 +45 51 1060 +46 52 1054 +47 53 1051 +48 53 1046 +49 54 1039 +50 55 1041 +51 58 1063 +52 59 1066 +53 60 1061 +54 61 1056 +55 62 1053 +56 63 1052 +57 64 1047 +58 67 
1076 +59 68 1069 +60 69 1062 +61 69 1055 +62 70 1055 +63 71 1051 +64 73 1056 +65 75 1062 +66 76 1061 +67 77 1058 +68 78 1056 +69 79 1054 +70 80 1050 +71 81 1044 +72 81 1038 +73 82 1034 +74 83 1029 +75 84 1023 +76 85 1019 +77 85 1016 +78 86 1012 +79 88 1023 +80 90 1022 +81 91 1018 +82 93 1036 +83 95 1036 +84 95 1031 +85 96 1026 +86 97 1021 +87 97 1015 +88 98 1009 +89 99 1004 +90 99 998 +91 100 993 +92 101 989 +93 102 985 +94 104 991 +95 105 992 +96 106 988 +97 106 984 +98 107 981 +99 109 981 +100 109 978 +101 110 975 +102 111 972 +103 112 969 +104 113 963 +105 113 958 +106 114 954 +107 115 949 +108 115 945 +109 116 941 +110 117 937 +111 118 936 +112 120 943 +113 121 942 +114 122 939 +115 123 942 +116 124 939 +117 125 936 +118 126 934 +119 126 930 +120 127 926 +121 128 924 +122 129 921 +123 129 917 +124 130 912 +125 131 909 +126 131 905 +127 132 902 +128 133 902 +129 134 899 +130 136 907 +131 137 904 +132 138 902 +133 139 900 +134 140 898 +135 140 895 +136 141 893 +137 143 894 +138 143 891 +139 144 888 +140 145 885 +141 146 882 +142 146 879 +143 147 876 +144 148 877 +145 149 875 +146 150 874 +147 151 873 +148 152 871 +149 153 869 +150 154 868 +151 155 865 +152 155 863 +153 156 862 +154 157 860 +155 158 857 +156 159 854 +157 159 851 +158 160 849 +159 161 846 +160 161 843 +161 162 841 +162 164 842 +163 164 840 +164 168 853 +165 169 852 +166 170 850 +167 171 849 +168 172 846 +169 172 843 +170 173 841 +171 174 838 +172 174 836 +173 175 833 +174 176 831 +175 176 828 +176 177 826 +177 178 823 +178 179 821 +179 180 820 +180 181 819 +181 184 827 +182 184 825 +183 186 824 +184 186 822 +185 187 821 +186 188 821 +187 189 818 +188 190 816 +189 190 814 +190 191 811 +191 192 809 +192 193 807 +193 193 804 +194 195 805 +195 195 803 +196 198 809 +197 199 808 +198 200 806 +199 201 805 +200 202 803 +201 202 801 +202 204 804 +203 205 802 +204 206 800 +205 207 798 +206 207 795 +207 208 793 +208 208 790 +209 209 788 +210 210 788 +211 211 787 +212 213 788 +213 214 788 +214 215 788 +215 216 787 +216 219 790 +217 220 789 +218 220 787 +219 221 786 +220 222 785 +221 223 783 +222 224 781 +223 225 780 +224 227 782 +225 228 784 +226 229 782 +227 230 780 +228 231 779 +229 232 777 +230 233 776 +231 234 774 +232 234 773 +233 235 771 +234 236 769 +235 237 767 +236 237 765 +237 238 763 +238 239 762 +239 240 760 +240 241 761 +241 243 761 +242 245 764 +243 246 762 +244 247 761 +245 247 759 +246 248 757 +247 249 756 +248 251 758 +249 252 756 +250 252 754 +251 253 753 +252 254 751 +253 255 750 +254 256 749 +255 258 750 +256 259 749 +257 260 749 +258 261 747 +259 262 746 +260 262 744 +261 263 742 +262 264 742 +263 265 740 +264 266 738 +265 267 738 +266 268 737 +267 269 736 +268 270 735 +269 272 736 +270 273 736 +271 274 735 +272 280 748 +273 282 747 +274 282 745 +275 283 744 +276 284 743 +277 285 741 +278 289 747 +279 290 747 +280 291 746 +281 294 748 +282 295 748 +283 296 746 +284 297 746 +285 298 744 +286 299 743 +287 300 742 +288 301 741 +289 304 744 +290 307 749 +291 308 747 +292 309 747 +293 311 748 +294 312 747 +295 313 745 +296 314 743 +297 314 741 +298 315 739 +299 316 737 +300 316 735 +301 317 733 +302 318 731 +303 319 730 +304 319 728 +305 321 730 +306 323 729 +307 324 728 +308 325 727 +309 326 725 +310 327 724 +311 327 723 +312 331 726 +313 344 753 +314 345 752 +315 346 750 +316 347 749 +317 348 748 +318 349 746 +319 350 744 +320 352 745 +321 354 745 +322 355 744 +323 356 742 +324 356 741 +325 357 739 +326 358 737 +327 358 735 +328 359 733 +329 360 731 +330 361 729 +331 361 727 +332 362 725 +333 363 724 +334 363 722 
+335 364 720 +336 365 719 +337 367 719 +338 368 718 +339 369 717 +340 370 716 +341 371 714 +342 372 712 +343 372 711 +344 373 709 +345 374 707 +346 375 706 +347 376 705 +348 377 704 +349 378 702 +350 379 701 +351 380 699 +352 380 698 +353 381 696 +354 382 695 +355 385 697 +356 386 696 +357 387 694 +358 388 693 +359 389 691 +360 389 690 +361 390 688 +362 391 687 +363 392 686 +364 393 684 +365 394 683 +366 395 682 +367 398 683 +368 399 682 +369 400 682 +370 401 681 +371 402 680 +372 403 678 +373 404 677 +374 405 675 +375 406 673 +376 406 672 +377 407 670 +378 408 669 +379 409 667 +380 410 666 +381 411 664 +382 412 663 +383 413 662 +384 415 663 +385 416 662 +386 417 661 +387 418 659 +388 419 658 +389 420 657 +390 421 655 +391 421 654 +392 422 652 +393 423 651 +394 424 650 +395 425 649 +396 426 647 +397 427 646 +398 428 645 +399 429 643 +400 430 642 +401 431 641 +402 431 639 +403 432 638 +404 433 636 +405 434 635 +406 434 633 +407 435 632 +408 436 630 +409 436 628 +410 437 627 +411 438 625 +412 438 623 +413 439 622 +414 440 620 +415 441 619 +416 441 617 +417 443 617 +418 443 615 +419 446 617 +420 447 615 +421 448 614 +422 449 612 +423 450 611 +424 450 610 +425 451 608 +426 452 607 +427 453 606 +428 454 605 +429 455 603 +430 456 602 +431 457 600 +432 458 600 +433 459 598 +434 460 597 +435 462 598 +436 463 597 +437 464 595 +438 465 594 +439 468 596 +440 469 595 +441 470 593 +442 471 592 +443 471 590 +444 472 589 +445 473 587 +446 476 589 +447 478 589 +448 479 587 +449 479 586 +450 480 585 +451 481 583 +452 482 582 +453 483 581 +454 483 579 +455 484 578 +456 485 576 +457 486 575 +458 487 574 +459 488 572 +460 489 572 +461 490 571 +462 492 571 +463 493 570 +464 494 568 +465 495 567 +466 498 568 +467 499 567 +468 500 566 +469 501 565 +470 503 565 +471 503 563 +472 504 562 +473 505 561 +474 508 561 +475 509 560 +476 512 561 +477 512 560 +478 514 559 +479 515 558 +480 516 556 +481 516 555 +482 517 554 +483 518 553 +484 519 551 +485 521 551 +486 522 550 +487 523 549 +488 525 548 +489 526 548 +490 528 547 +491 528 546 +492 530 545 +493 531 544 +494 532 543 +495 533 542 +496 534 541 +497 535 539 +498 536 538 +499 537 537 +500 539 536 +501 540 535 +502 541 534 +503 543 534 +504 545 534 +505 546 533 +506 547 532 +507 548 531 +508 549 530 +509 550 529 +510 552 528 +511 554 528 +512 558 529 +513 559 528 +514 560 527 +515 561 526 +516 561 524 +517 562 523 +518 563 522 +519 564 520 +520 565 519 +521 565 518 +522 567 517 +523 568 516 +524 571 517 +525 572 515 +526 573 514 +527 574 513 +528 575 512 +529 576 511 +530 577 510 +531 578 508 +532 579 507 +533 580 506 +534 580 504 +535 581 503 +536 582 502 +537 583 501 +538 585 501 +539 586 499 +540 587 498 +541 588 497 +542 589 496 +543 590 494 +544 590 493 +545 591 491 +546 592 490 +547 592 489 +548 593 487 +549 594 486 +550 595 484 +551 595 483 +552 596 482 +553 598 481 +554 599 480 +555 600 479 +556 602 478 +557 603 477 +558 604 476 +559 605 475 +560 605 474 +561 606 472 +562 607 471 +563 608 470 +564 609 469 +565 610 467 +566 611 466 +567 611 465 +568 614 465 +569 615 464 +570 617 463 +571 618 462 +572 619 461 +573 619 460 +574 620 458 +575 621 457 +576 622 456 +577 622 454 +578 623 453 +579 624 452 +580 625 451 +581 626 449 +582 627 448 +583 628 447 +584 629 446 +585 630 445 +586 631 444 +587 632 443 +588 633 442 +589 634 441 +590 635 439 +591 636 438 +592 637 437 +593 637 435 +594 638 434 +595 639 433 +596 640 432 +597 640 430 +598 641 429 +599 642 428 +600 644 427 +601 645 426 +602 648 426 +603 649 425 +604 650 424 +605 651 423 +606 651 422 +607 653 421 +608 
654 420 +609 655 419 +610 656 417 +611 657 416 +612 659 416 +613 660 415 +614 661 414 +615 663 413 +616 664 412 +617 665 411 +618 666 410 +619 667 408 +620 667 407 +621 668 406 +622 669 405 +623 670 403 +624 671 402 +625 672 401 +626 673 400 +627 674 399 +628 675 398 +629 677 398 +630 679 397 +631 680 396 +632 681 394 +633 681 393 +634 682 392 +635 683 391 +636 684 389 +637 685 388 +638 685 387 +639 687 386 +640 688 385 +641 688 384 +642 689 382 +643 690 381 +644 691 380 +645 694 380 +646 695 379 +647 696 378 +648 697 377 +649 698 376 +650 699 374 +651 699 373 +652 700 372 +653 701 371 +654 702 369 +655 703 368 +656 703 367 +657 704 366 +658 706 365 +659 706 364 +660 708 363 +661 709 362 +662 711 361 +663 712 360 +664 713 359 +665 713 358 +666 714 356 +667 715 355 +668 716 354 +669 717 353 +670 717 352 +671 718 350 +672 719 349 +673 720 348 +674 721 347 +675 722 346 +676 726 346 +677 727 345 +678 728 344 +679 729 343 +680 730 342 +681 730 340 +682 731 339 +683 732 338 +684 733 337 +685 734 336 +686 735 334 +687 736 333 +688 737 332 +689 738 331 +690 739 330 +691 740 329 +692 741 328 +693 742 327 +694 743 326 +695 744 325 +696 744 323 +697 745 322 +698 746 321 +699 747 320 +700 748 319 +701 748 317 +702 749 316 +703 750 315 +704 752 314 +705 754 314 +706 755 313 +707 756 312 +708 757 310 +709 758 309 +710 759 308 +711 759 307 +712 760 306 +713 761 304 +714 762 303 +715 762 302 +716 763 301 +717 764 300 +718 765 299 +719 765 297 +720 767 296 +721 768 295 +722 770 295 +723 770 293 +724 771 292 +725 772 291 +726 773 290 +727 775 289 +728 776 288 +729 776 287 +730 777 286 +731 778 285 +732 779 283 +733 779 282 +734 780 281 +735 781 280 +736 782 279 +737 783 278 +738 784 276 +739 784 275 +740 785 274 +741 786 273 +742 787 272 +743 788 271 +744 788 269 +745 789 268 +746 790 267 +747 790 266 +748 791 265 +749 792 264 +750 792 262 +751 793 261 +752 794 260 +753 794 259 +754 795 258 +755 796 256 +756 796 255 +757 797 254 +758 799 253 +759 800 252 +760 800 251 +761 801 250 +762 802 249 +763 803 248 +764 803 246 +765 804 245 +766 805 244 +767 805 243 +768 806 242 +769 807 241 +770 807 239 +771 808 238 +772 809 237 +773 809 236 +774 810 235 +775 811 234 +776 811 233 +777 812 231 +778 813 230 +779 814 229 +780 814 228 +781 815 227 +782 816 226 +783 817 225 +784 817 223 +785 818 222 +786 819 221 +787 820 220 +788 820 219 +789 821 218 +790 821 217 +791 822 216 +792 823 214 +793 824 213 +794 824 212 +795 825 211 +796 826 210 +797 826 209 +798 827 208 +799 828 207 +800 829 206 +801 829 204 +802 830 203 +803 831 202 +804 832 201 +805 832 200 +806 833 199 +807 834 198 +808 834 197 +809 835 195 +810 836 194 +811 836 193 +812 837 192 +813 838 191 +814 838 190 +815 839 189 +816 840 188 +817 840 187 +818 841 186 +819 842 184 +820 843 183 +821 843 182 +822 844 181 +823 845 180 +824 846 179 +825 847 178 +826 847 177 +827 848 176 +828 849 175 +829 850 174 +830 850 173 +831 851 171 +832 852 170 +833 852 169 +834 853 168 +835 854 167 +836 854 166 +837 855 165 +838 856 164 +839 857 163 +840 857 162 +841 858 161 +842 859 159 +843 860 159 +844 861 157 +845 861 156 +846 863 155 +847 863 154 +848 864 153 +849 865 152 +850 865 151 +851 866 150 +852 867 149 +853 867 148 +854 868 147 +855 869 146 +856 869 145 +857 870 144 +858 871 143 +859 872 141 +860 872 140 +861 873 139 +862 874 138 +863 875 137 +864 876 136 +865 876 135 +866 877 134 +867 878 133 +868 879 132 +869 879 131 +870 880 130 +871 881 129 +872 881 128 +873 882 127 +874 883 126 +875 883 125 +876 884 124 +877 885 123 +878 885 121 +879 886 120 +880 887 119 +881 888 
118 +882 888 117 +883 889 116 +884 890 115 +885 891 114 +886 892 113 +887 893 112 +888 894 111 +889 894 110 +890 895 109 +891 896 108 +892 896 107 +893 897 106 +894 898 105 +895 898 104 +896 899 103 +897 900 102 +898 901 101 +899 901 100 +900 902 99 +901 903 98 +902 903 97 +903 904 96 +904 905 95 +905 905 93 +906 906 92 +907 907 91 +908 908 90 +909 909 89 +910 909 88 +911 910 87 +912 911 86 +913 911 85 +914 912 84 +915 913 83 +916 913 82 +917 914 81 +918 915 80 +919 916 79 +920 916 78 +921 917 77 +922 918 76 +923 918 75 +924 919 74 +925 920 73 +926 920 72 +927 922 71 +928 923 70 +929 923 69 +930 924 68 +931 925 67 +932 926 66 +933 926 65 +934 927 64 +935 928 63 +936 928 62 +937 929 61 +938 930 60 +939 930 59 +940 931 58 +941 932 57 +942 933 56 +943 933 55 +944 934 54 +945 935 53 +946 935 52 +947 936 51 +948 937 50 +949 938 49 +950 938 48 +951 939 47 +952 940 46 +953 941 45 +954 942 44 +955 942 43 +956 943 42 +957 943 41 +958 944 40 +959 945 39 +960 945 38 +961 946 37 +962 947 36 +963 947 35 +964 948 34 +965 949 33 +966 949 32 +967 950 31 +968 951 30 +969 952 29 +970 953 28 +971 953 27 +972 954 26 +973 955 25 +974 956 24 +975 957 23 +976 957 22 +977 958 21 +978 959 20 +979 959 19 +980 960 18 +981 961 17 +982 961 16 +983 962 15 +984 963 14 +985 964 13 +986 964 12 +987 965 11 +988 966 10 +989 967 9 +990 968 8 +991 969 7 +992 969 6 +993 970 5 +994 971 4 +995 972 3 +996 974 2 +997 975 1 +998 975 0 +999 976 0 diff --git a/nn_LYC/train_model_nn.py b/nn_LYC/train_model_nn.py new file mode 100644 index 0000000..391d8cd --- /dev/null +++ b/nn_LYC/train_model_nn.py @@ -0,0 +1,110 @@ +# %% +import pandas as pd + +from itertools import product + + +def get_state_vect_cols(prefix=''): + if prefix: + prefix += '_' + vectors = ['r', 'v'] + components = ['x', 'y', 'z'] + col_names = [f'{prefix}{v}_{c}' for v, c in product(vectors, components)] + return col_names + + +# %% +df = pd.read_parquet("traindata/physics_preds.parquet") +test_set = df[df['aso_id'] == "05277"] + +train_set = df.groupby('aso_id').apply(lambda x: x.head(x.count()[0] - 3)) +print(df.count()[0], train_set.count()[0], test_set.count()[0]) + +# %% +from sklearn.model_selection import train_test_split + +feature_cols = [ + 'elapsed_seconds' +] + get_state_vect_cols('physics_pred') + get_state_vect_cols('start') +print(feature_cols) +# The target values are the errors between the physical model predictions +# and the ground truth observations +target_cols = get_state_vect_cols('physics_err') +print(target_cols) +# Create feature and target matrices +X = df[feature_cols] +y = df[target_cols] +data_keys = ['X_train', 'X_test', 'y_train', 'y_test'] +data_vals = train_test_split(X, y, test_size=0.2) +train_test_data = dict(zip(data_keys, data_vals)) + +# %% +import torch +import torch.nn as nn +from torch.utils.data import TensorDataset, DataLoader +from sklearn import metrics + +TestNet2 = lambda: nn.Sequential( + nn.Linear(13, 64), + nn.ReLU6(), + nn.Linear(64, 64), + nn.LeakyReLU(), + nn.Linear(64, 1), +) + +nets = {} + +X_train = torch.tensor(train_test_data["X_train"].values, + dtype=torch.float32).cuda() +y_train = train_test_data["y_train"] +X_test = torch.tensor(train_test_data['X_test'].values, + dtype=torch.float32).cuda() +y_test = train_test_data['y_test'] +r2s = [] +for target_col in y_train.columns: + y1 = torch.tensor(y_train[target_col].values, + dtype=torch.float32).reshape(-1, 1).cuda() + print(X.shape, y1.shape) + net = TestNet2().cuda() + opti = torch.optim.SGD(net.parameters(), lr=0.02) + loss_func = nn.MSELoss() + 
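A minimal reference sketch (not part of the diff): where TestNet2's 13 input features come from. get_state_vect_cols expands a prefix over the r/v vectors and x/y/z components, so the feature matrix is one elapsed-time column plus two 6-column state-vector blocks, and the targets are the six physics_err components.

from itertools import product

def get_state_vect_cols(prefix=''):
    if prefix:
        prefix += '_'
    # Cartesian product of the position/velocity vectors with their x/y/z components
    return [f'{prefix}{v}_{c}' for v, c in product(['r', 'v'], ['x', 'y', 'z'])]

feature_cols = (['elapsed_seconds']
                + get_state_vect_cols('physics_pred')
                + get_state_vect_cols('start'))
print(len(feature_cols))                  # 13 -> matches nn.Linear(13, 64)
print(get_state_vect_cols('physics_err'))
# ['physics_err_r_x', 'physics_err_r_y', 'physics_err_r_z',
#  'physics_err_v_x', 'physics_err_v_y', 'physics_err_v_z']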
train_dataloader = DataLoader(TensorDataset(X_train, y1), batch_size=320) + for t in range(10000): + for batch, (x, y) in enumerate(train_dataloader): + pred = net(x) + loss = loss_func(pred, y) + opti.zero_grad() + torch.sqrt(loss).backward() + opti.step() + with torch.no_grad(): + y = y_test[target_col] + y_hat = net(X_test).cpu().numpy() + rmse = metrics.mean_squared_error(y, y_hat, squared=False) + r2 = metrics.r2_score(y, y_hat) + r2s.append(r2) + print(f"Epoch {t}: r2={r2}, rmse={rmse}") + nets[target_col] = net + print(target_col) + break + +import matplotlib.pyplot as plt + +plt.plot(r2s) +plt.show() + +# %% +X, ys = train_test_data['X_test'], train_test_data['y_test'] +evals = [] +with torch.no_grad(): + for target_col, net in nets.items(): + y_hat = net(torch.tensor(X.values)) # fake + y_hat = y_hat.detach().numpy() + y = ys[target_col] # real + print(y) + print(y_hat) + rmse = metrics.mean_squared_error(y, y_hat, squared=False) + r2 = metrics.r2_score(y, y_hat) + eval_dict = {'Error': target_col, 'RMSE': rmse, 'R^2': r2} + evals.append(eval_dict) +print(pd.DataFrame(evals)) +# %% diff --git a/nn_TR/FNN.py b/nn_TR/FNN.py new file mode 100644 index 0000000..8e5118c --- /dev/null +++ b/nn_TR/FNN.py @@ -0,0 +1,343 @@ +#数据预处理 +import pandas as pd +from itertools import product +from torch.utils.data import DataLoader +from sklearn.metrics import r2_score +from sklearn.utils import shuffle + +def get_state_vect_cols(prefix=''): + if prefix: + prefix += '_' + vectors = ['r', 'v'] + components = ['x', 'y', 'z'] + col_names = [f'{prefix}{v}_{c}' for v, c in product(vectors, components)] + return col_names + +pd.set_option('display.max_rows', None) +pd.set_option('display.max_columns', None) +pd.set_option('display.width', 1000) + +# %% +df = pd.read_parquet("physics_preds.parquet") +test_set = df[df['aso_id'] == "05277"] + +train_set = df.groupby('aso_id').apply(lambda x: x.head(x.count()[0] - 3)) +print(df.count()[0], train_set.count()[0], test_set.count()[0]) +data_count = df.count()[0] + +# %% +feature_cols = [ + 'elapsed_seconds' +] + get_state_vect_cols('physics_pred') + get_state_vect_cols('start') +print(feature_cols) +# The target values are the errors between the physical model predictions +# and the ground truth observations +target_cols = get_state_vect_cols('physics_err') +print(target_cols) + +print("样本统一归一化处理") +all_cols = feature_cols + target_cols +df = df[all_cols] +df = shuffle(df) +df = (df - df.mean(axis=0)) / df.std(axis=0) +# Create feature and target matrices +X = df[feature_cols] +y_all = df[target_cols] +# y = df['physics_err_v_x'] +# print(y) +# %% + + + + + + + +#FNN神经网络 + +import torch +import torch.nn as nn +import torchvision.transforms as transforms +import torchvision.datasets as dsets + +device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + +class Net(nn.Module): + + def __init__(self, features): + super(Net, self).__init__() + + self.linear_relu1 = nn.Linear(features, 128) + self.linear_relu2 = nn.Linear(128, 256) + self.linear_relu3 = nn.Linear(256, 256) + self.linear_relu4 = nn.Linear(256, 256) + self.linear5 = nn.Linear(256, 1) + + def forward(self, x): + y_pred = self.linear_relu1(x) + y_pred = nn.functional.relu(y_pred) + + y_pred = self.linear_relu2(y_pred) + y_pred = nn.functional.relu(y_pred) + + y_pred = self.linear_relu3(y_pred) + y_pred = nn.functional.relu(y_pred) + + y_pred = self.linear_relu4(y_pred) + y_pred = nn.functional.relu(y_pred) + + y_pred = self.linear5(y_pred) + return y_pred + +for target_col in 
y_all.columns: + y = y_all[target_col] + train_size = int(data_count * 0.80) + # 训练数据集特征 + train_features = torch.tensor(X[:train_size].values, dtype=torch.float) + train_features = train_features.to(device) + # 训练数据集目标 + train_labels = torch.tensor(y[:train_size].values, dtype=torch.float).view(-1, 1) + train_labels = train_labels.to(device) + # print("train data size: ", train_features.shape) + # print("label data size: ", train_labels.shape) + # print("test data size: ", test_features.shape) + + model = Net(features=train_features.shape[1]) + model = model.to(device) + model.train() + + criterion = nn.MSELoss(reduction='mean') + criterion = criterion.to(device) + optimizer = torch.optim.Adam(model.parameters(), lr=1e-4) + + losses = [] + epoch = 5000 + epoch_real = 0 + for t in range(epoch): + y_pred = model(train_features) + y_pred.to(device) + + loss = criterion(y_pred, train_labels) + losses.append(loss.item()) + + if torch.isnan(loss): + break + + optimizer.zero_grad() + + loss.backward() + + optimizer.step() + epoch_real = epoch_real + 1 + + print(f"epoch:{epoch_real} loss:{losses[-1]} target:{target_col}") + torch.save(model.state_dict(), './{}_FNN.pth'.format(target_col)) + +evals = [] +for target_col in y_all.columns: + y = y_all[target_col] + # 测试数据集特征 + test_features = torch.tensor(X[train_size:].values, dtype=torch.float) + test_features = test_features.to(device) + model.load_state_dict(torch.load('./{}_FNN.pth'.format(target_col), map_location=lambda storage, loc: storage)) + model.to(device) + pred_labels = model(test_features) + pred_labels = pred_labels.cpu().data.numpy() + real_labels = y[train_size:].values + r2 = r2_score(real_labels, pred_labels) + eval_dict = {'Error': target_col, 'R^2': r2} + evals.append(eval_dict) + # print(r2) +print(pd.DataFrame(evals)) + + + +# model_sequential = nn.Sequential( +# nn.Linear(test_features.shape[1], 128), +# nn.ReLU(), +# nn.Linear(128, 256), +# nn.ReLU(), +# nn.Linear(256, 256), +# nn.ReLU(), +# nn.Linear(256, 256), +# nn.ReLU(), +# nn.Linear(256, 1) +# ) + + + + + + + + + + + + +# +# ''' +# STEP 1: LOADING DATASET +# ''' +# +# train_dataset = dsets.MNIST(root='./data', +# train=True, +# transform=transforms.ToTensor(), +# download=True) +# +# test_dataset = dsets.MNIST(root='./data', +# train=False, +# transform=transforms.ToTensor()) +# +# ''' +# STEP 2: MAKING DATASET ITERABLE +# ''' +# +# batch_size = 100 +# n_iters = 3000 +# num_epochs = n_iters / (len(train_dataset) / batch_size) +# num_epochs = int(num_epochs) +# +# train_loader = torch.utils.data.DataLoader(dataset=train_dataset, +# batch_size=batch_size, +# shuffle=True) +# +# test_loader = torch.utils.data.DataLoader(dataset=test_dataset, +# batch_size=batch_size, +# shuffle=False) +# +# ''' +# STEP 3: CREATE MODEL CLASS +# ''' +# class FeedforwardNeuralNetModel(nn.Module): +# def __init__(self, input_dim, hidden_dim, output_dim): +# super(FeedforwardNeuralNetModel, self).__init__() +# # Linear function 1: 784 --> 100 +# self.fc1 = nn.Linear(input_dim, hidden_dim) +# # Non-linearity 1 +# self.relu1 = nn.ReLU() +# +# # Linear function 2: 100 --> 100 +# self.fc2 = nn.Linear(hidden_dim, hidden_dim) +# # Non-linearity 2 +# self.relu2 = nn.ReLU() +# +# # Linear function 3: 100 --> 100 +# self.fc3 = nn.Linear(hidden_dim, hidden_dim) +# # Non-linearity 3 +# self.relu3 = nn.ReLU() +# +# # Linear function 4 (readout): 100 --> 10 +# self.fc4 = nn.Linear(hidden_dim, output_dim) +# +# def forward(self, x): +# # Linear function 1 +# out = self.fc1(x) +# # Non-linearity 1 +# out = 
self.relu1(out) +# +# # Linear function 2 +# out = self.fc2(out) +# # Non-linearity 2 +# out = self.relu2(out) +# +# # Linear function 2 +# out = self.fc3(out) +# # Non-linearity 2 +# out = self.relu3(out) +# +# # Linear function 4 (readout) +# out = self.fc4(out) +# return out +# ''' +# STEP 4: INSTANTIATE MODEL CLASS +# ''' +# input_dim = 28*28 +# hidden_dim = 100 +# output_dim = 10 +# +# model = FeedforwardNeuralNetModel(input_dim, hidden_dim, output_dim) +# +# ####################### +# # USE GPU FOR MODEL # +# ####################### +# +# device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") +# model.to(device) +# +# ''' +# STEP 5: INSTANTIATE LOSS CLASS +# ''' +# criterion = nn.CrossEntropyLoss() +# +# +# ''' +# STEP 6: INSTANTIATE OPTIMIZER CLASS +# ''' +# learning_rate = 0.1 +# +# optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate) +# +# ''' +# STEP 7: TRAIN THE MODEL +# ''' +# iter = 0 +# for epoch in range(num_epochs): +# for i, (images, labels) in enumerate(train_loader): +# +# ####################### +# # USE GPU FOR MODEL # +# ####################### +# images = images.view(-1, 28*28).requires_grad_().to(device) +# labels = labels.to(device) +# +# # Clear gradients w.r.t. parameters +# optimizer.zero_grad() +# +# # Forward pass to get output/logits +# outputs = model(images) +# +# # Calculate Loss: softmax --> cross entropy loss +# loss = criterion(outputs, labels) +# +# # Getting gradients w.r.t. parameters +# loss.backward() +# +# # Updating parameters +# optimizer.step() +# +# iter += 1 +# +# if iter % 500 == 0: +# # Calculate Accuracy +# correct = 0 +# total = 0 +# # Iterate through test dataset +# for images, labels in test_loader: +# ####################### +# # USE GPU FOR MODEL # +# ####################### +# images = images.view(-1, 28*28).requires_grad_().to(device) +# +# # Forward pass only to get logits/output +# outputs = model(images) +# +# # Get predictions from the maximum value +# _, predicted = torch.max(outputs.data, 1) +# +# # Total number of labels +# total += labels.size(0) +# +# ####################### +# # USE GPU FOR MODEL # +# ####################### +# # Total correct predictions +# if torch.cuda.is_available(): +# correct += (predicted.cpu() == labels.cpu()).sum() +# else: +# correct += (predicted == labels).sum() +# +# accuracy = 100 * correct / total +# +# # Print Loss +# print('Iteration: {}. Loss: {}. 
Accuracy: {}'.format(iter, loss.item(), accuracy)) \ No newline at end of file diff --git a/nn_TR/train_LSTM.py b/nn_TR/train_LSTM.py new file mode 100644 index 0000000..e1339b6 --- /dev/null +++ b/nn_TR/train_LSTM.py @@ -0,0 +1,425 @@ +#https://blog.csdn.net/weixin_35757704/article/details/115910672 +#https://www.bilibili.com/video/BV1hE411t7RN?p=27 +#https://www.zhihu.com/question/39792141 +import numpy as np +from typing import Union +import pandas as pd +from itertools import product +import torch +from torch import nn +from torch.utils.data import DataLoader +import torchvision +from sklearn.model_selection import train_test_split +from sklearn.utils.validation import check_X_y +import joblib +from catboost import CatBoostRegressor +from lightgbm import LGBMRegressor +from xgboost import XGBRegressor +from sklearn.metrics import r2_score +from CNN_architecture import CNN_architecture +from sklearn.utils import shuffle + +def get_state_vect_cols(prefix=''): + if prefix: + prefix += '_' + vectors = ['r', 'v'] + components = ['x', 'y', 'z'] + col_names = [f'{prefix}{v}_{c}' for v, c in product(vectors, components)] + return col_names + +pd.set_option('display.max_rows', None) +pd.set_option('display.max_columns', None) +pd.set_option('display.width', 1000) +# %% +df = pd.read_parquet("traindata/physics_preds.parquet") +test_set = df[df['aso_id'] == "05277"] + +train_set = df.groupby('aso_id').apply(lambda x: x.head(x.count()[0] - 3)) +print(df.count()[0], train_set.count()[0], test_set.count()[0]) +data_count = df.count()[0] + +# print(train_set) + +# %% +feature_cols = [ + 'elapsed_seconds' +] + get_state_vect_cols('physics_pred') + get_state_vect_cols('start') +print(feature_cols) +# The target values are the errors between the physical model predictions +# and the ground truth observations +target_cols = get_state_vect_cols('physics_err') +print(target_cols) + +print("样本统一归一化处理") +all_cols = feature_cols + target_cols +df = df[all_cols] +df = shuffle(df) +df = (df - df.mean(axis=0)) / df.std(axis=0) +# Create feature and target matrices +feature_cols = ['physics_err_v_x'] + feature_cols +X = df[feature_cols] +y = df[target_cols] +# data_keys = ['X_train', 'X_test', 'y_train', 'y_test'] +# data_vals = train_test_split(X, y, test_size=0.2) +# train_test_data = dict(zip(data_keys, data_vals)) +# train_test_data['X_test'] = test_set[feature_cols] +# train_test_data['y_test'] = test_set[target_cols] +# train_test_data = { +# 'X_train': train_set[feature_cols], +# 'y_train': train_set[target_cols], +# 'X_test': test_set[feature_cols], +# 'y_test': test_set[target_cols], +# } + +# %% + +import numpy as np +import torch +from torch import nn + +import matplotlib.pyplot as plt + +""" +Github: Yonv1943 Zen4 Jia1 hao2 +https://github.com/Yonv1943/DL_RL_Zoo/blob/master/RNN + +The source of training data +https://github.com/L1aoXingyu/ +code-of-learn-deep-learning-with-pytorch/blob/master/ +chapter5_RNN/time-series/lstm-time-series.ipynb +""" + +import os +os.environ["CUDA_VISIBLE_DEVICES"] = "0" + +def run_train_lstm(X): + + inp_dim = 14 + out_dim = 1 + mid_dim = 15 + mid_layers = 2 + batch_size = 12 * 4 + mod_dir = '.' 
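A minimal sketch (not part of the diff, using a small stand-in array) of how the load-data step below turns X into a one-step-ahead regression problem: with 'physics_err_v_x' prepended, X has 14 columns (hence inp_dim = 14), and the target is that first column shifted forward by one row.

import numpy as np

data = np.arange(12, dtype=np.float32).reshape(4, 3)  # stand-in for X.values (the real data has 14 columns)
data_x = data[:-1, :]  # every column of rows 0..n-2
data_y = data[1:, 0]   # first column of rows 1..n-1, i.e. the next row's 'physics_err_v_x'
print(data_x.shape, data_y.shape)  # (3, 3) (3,)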
+ + '''load data''' + data = X.values + pd.set_option('display.max_rows', None) + pd.set_option('display.max_columns', None) + pd.set_option('display.width', 1000) + # np.set_printoptions(threshold='nan') + # print(data) + data_x = data[:-1, :] + data_y = data[+1:, 0] + # print(data_y) + assert data_x.shape[1] == inp_dim + train_size = int(data_count * 0.80) + + train_x = data_x[:train_size] + train_y = data_y[:train_size] + train_x = train_x.reshape((train_size, inp_dim)) + train_y = train_y.reshape((train_size, out_dim)) + + # print(train_y) + + # X, ys = train_test_data['X_train'], train_test_data['y_train'] + # + # check_X_y(X, ys, multi_output=True) + # + # train_x = X.values + # train_y = ys['physics_err_r_x'].values + # train_y = ys.reshape(ys.shape[0], 1) + + '''build model''' + # device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + device = torch.device("cuda") + net = RegLSTM(inp_dim, out_dim, mid_dim, mid_layers).to(device) + criterion = nn.MSELoss() + optimizer = torch.optim.Adam(net.parameters(), lr=1e-2) + + '''train''' + var_x = torch.tensor(train_x, dtype=torch.float32, device=device) + var_y = torch.tensor(train_y, dtype=torch.float32, device=device) + + batch_var_x = list() + batch_var_y = list() + + for i in range(batch_size): + j = batch_size - i + batch_var_x.append(var_x[j:]) + batch_var_y.append(var_y[j:]) + + from torch.nn.utils.rnn import pad_sequence + batch_var_x = pad_sequence(batch_var_x) + batch_var_y = pad_sequence(batch_var_y) + + with torch.no_grad(): + weights = np.tanh(np.arange(len(train_y)) * (np.e / len(train_y))) + weights = torch.tensor(weights, dtype=torch.float32, device=device) + + print("Training Start") + for e in range(10): + out = net(batch_var_x) + + # loss = criterion(out, batch_var_y) + loss = (out - batch_var_y) ** 2 * weights + loss = loss.mean() + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + if e % 64 == 0: + print('Epoch: {:4}, Loss: {:.5f}'.format(e, loss.item())) + torch.save(net.state_dict(), '{}/net.pth'.format(mod_dir)) + print("Save in:", '{}/net.pth'.format(mod_dir)) + + '''eval''' + net.load_state_dict(torch.load('{}/net.pth'.format(mod_dir), map_location=lambda storage, loc: storage)) + net = net.eval() + + # X1, ys1 = train_test_data['X_test'], train_test_data['y_test'] + # check_X_y(X1, ys1, multi_output=True) + # test_x = X1.values + # test_y = ys1['physics_err_r_x'].values + # test_y = ys1.reshape(ys.shape[0], 1) + # test_x = + + + test_x = data_x.copy() + test_x[train_size:, 0] = 0 + # print(test_x) + test_x = test_x[:, np.newaxis, :] + test_x = torch.tensor(test_x, dtype=torch.float32, device=device) + + '''simple way but no elegant''' + for i in range(train_size, len(data) - 2): + test_y = net(test_x[:i]) + test_x[i, 0, 0] = test_y[-1] + + '''elegant way but slightly complicated''' + # eval_size = 1 + # zero_ten = torch.zeros((mid_layers, eval_size, mid_dim), dtype=torch.float32, device=device) + # test_y, hc = net.output_y_hc(test_x[:train_size], (zero_ten, zero_ten)) + # test_x[train_size + 1, 0, 0] = test_y[-1] + # for i in range(train_size + 1, len(data) - 2): + # test_y, hc = net.output_y_hc(test_x[i:i + 1], hc) + # test_x[i + 1, 0, 0] = test_y[-1] + + pred_y = test_x[1:, 0, 0] + pred_y = pred_y.cpu().data.numpy() + print("`````````````````````````") + print(pred_y.shape) + diff_y = pred_y[train_size:] - data_y[train_size:-1] + print("------") + # print(pred_y[train_size:]) + print("------") + # print(data_y[train_size:-1]) + r2 = r2_score(data_y[train_size:-1], 
pred_y[train_size:], multioutput= 'uniform_average') + evals = [] + eval_dict = {'Error': 'physics_err_v_x', 'R^2': r2} + evals.append(eval_dict) + print(pd.DataFrame(evals)) + + l1_loss = np.mean(np.abs(diff_y)) + l2_loss = np.mean(diff_y ** 2) + print("L1: {:.3f} L2: {:.3f}".format(l1_loss, l2_loss)) + + plt.plot(pred_y, 'r', label='pred') + plt.plot(data_y, 'b', label='real', alpha=0.3) + plt.plot([train_size, train_size], [-1, 2], color='k', label='train | pred') + plt.legend(loc='best') + plt.savefig('lstm_reg.png') + plt.pause(4) + + +def run_origin(): + inp_dim = 2 + out_dim = 1 + mod_dir = '.' + + '''load data''' + data = load_data() # axis1: number, year, month + data_x = np.concatenate((data[:-2, 0:1], data[+1:-1, 0:1]), axis=1) + data_y = data[2:, 0] + + train_size = int(len(data_x) * 0.75) + train_x = data_x[:train_size] + train_y = data_y[:train_size] + + train_x = train_x.reshape((-1, 1, inp_dim)) + train_y = train_y.reshape((-1, 1, out_dim)) + + '''build model''' + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + net = RegLSTM(inp_dim, out_dim, mid_dim=4, mid_layers=2).to(device) + criterion = nn.SmoothL1Loss() + optimizer = torch.optim.Adam(net.parameters(), lr=1e-2) + + '''train''' + var_x = torch.tensor(train_x, dtype=torch.float32, device=device) + var_y = torch.tensor(train_y, dtype=torch.float32, device=device) + print('var_x.size():', var_x.size()) + print('var_y.size():', var_y.size()) + + for e in range(512): + out = net(var_x) + loss = criterion(out, var_y) + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + if (e + 1) % 100 == 0: # 每 100 次输出结果 + print('Epoch: {}, Loss: {:.5f}'.format(e + 1, loss.item())) + + torch.save(net.state_dict(), '{}/net.pth'.format(mod_dir)) + + '''eval''' + # net.load_state_dict(torch.load('{}/net.pth'.format(mod_dir), map_location=lambda storage, loc: storage)) + net = net.eval() # 转换成测试模式 + + """ + inappropriate way of seq prediction: + use all real data to predict the number of next month + """ + test_x = data_x.reshape((-1, 1, inp_dim)) + var_data = torch.tensor(test_x, dtype=torch.float32, device=device) + eval_y = net(var_data) # 测试集的预测结果 + pred_y = eval_y.view(-1).cpu().data.numpy() + + plt.plot(pred_y[1:], 'r', label='pred inappr', alpha=0.3) + plt.plot(data_y, 'b', label='real', alpha=0.3) + plt.plot([train_size, train_size], [-1, 2], label='train | pred') + + """ + appropriate way of seq prediction: + use real+pred data to predict the number of next 3 years. + """ + test_x = data_x.reshape((-1, 1, inp_dim)) + test_x[train_size:] = 0 # delete the data of next 3 years. 
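A minimal sketch (not part of the diff) of the pad_sequence behaviour that the staggered-slice batching in run_train_lstm relies on: with the default batch_first=False it stacks variable-length sequences into a zero-padded (seq_len, batch, features) tensor, which is the layout nn.LSTM and RegLSTM.forward expect.

import torch
from torch.nn.utils.rnn import pad_sequence

seqs = [torch.randn(n, 14) for n in (5, 3, 4)]  # three sequences of different lengths
batch = pad_sequence(seqs)                      # shorter sequences are zero-padded at the end
print(batch.shape)                              # torch.Size([5, 3, 14])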
+ test_x = torch.tensor(test_x, dtype=torch.float32, device=device) + for i in range(train_size, len(data) - 2): + test_y = net(test_x[:i]) + test_x[i, 0, 0] = test_x[i - 1, 0, 1] + test_x[i, 0, 1] = test_y[-1, 0] + pred_y = test_x.cpu().data.numpy() + pred_y = pred_y[:, 0, 0] + plt.plot(pred_y[2:], 'g', label='pred appr') + + plt.legend(loc='best') + plt.savefig('lstm_origin.png') + plt.pause(4) + + +class RegLSTM(nn.Module): + def __init__(self, inp_dim, out_dim, mid_dim, mid_layers): + super(RegLSTM, self).__init__() + + self.rnn = nn.LSTM(inp_dim, mid_dim, mid_layers) # rnn + self.reg = nn.Sequential( + nn.Linear(mid_dim, mid_dim), + nn.Tanh(), + nn.Linear(mid_dim, out_dim), + ) # regression + + def forward(self, x): + y = self.rnn(x)[0] # y, (h, c) = self.rnn(x) + + seq_len, batch_size, hid_dim = y.shape + y = y.view(-1, hid_dim) + y = self.reg(y) + y = y.view(seq_len, batch_size, -1) + return y + + """ + PyCharm Crtl+click nn.LSTM() jump to code of PyTorch: + Examples:: + >>> rnn = nn.LSTM(10, 20, 2) + >>> input = torch.randn(5, 3, 10) + >>> h0 = torch.randn(2, 3, 20) + >>> c0 = torch.randn(2, 3, 20) + >>> output, (hn, cn) = rnn(input, (h0, c0)) + """ + + def output_y_hc(self, x, hc): + y, hc = self.rnn(x, hc) # y, (h, c) = self.rnn(x) + + seq_len, batch_size, hid_dim = y.size() + y = y.view(-1, hid_dim) + y = self.reg(y) + y = y.view(seq_len, batch_size, -1) + return y, hc + + +class RegGRU(nn.Module): + def __init__(self, inp_dim, out_dim, mod_dim, mid_layers): + super(RegGRU, self).__init__() + + self.rnn = nn.GRU(inp_dim, mod_dim, mid_layers) + self.reg = nn.Linear(mod_dim, out_dim) + + def forward(self, x): + x, h = self.rnn(x) # (seq, batch, hidden) + + seq_len, batch_size, hid_dim = x.shape + x = x.view(-1, hid_dim) + x = self.reg(x) + x = x.view(seq_len, batch_size, -1) + return x + + def output_y_h(self, x, h): + y, h = self.rnn(x, h) + + seq_len, batch_size, hid_dim = y.size() + y = y.view(-1, hid_dim) + y = self.reg(y) + y = y.view(seq_len, batch_size, -1) + return y, h + + +def load_data(): + # passengers number of international airline , 1949-01 ~ 1960-12 per month + seq_number = np.array( + [112., 118., 132., 129., 121., 135., 148., 148., 136., 119., 104., + 118., 115., 126., 141., 135., 125., 149., 170., 170., 158., 133., + 114., 140., 145., 150., 178., 163., 172., 178., 199., 199., 184., + 162., 146., 166., 171., 180., 193., 181., 183., 218., 230., 242., + 209., 191., 172., 194., 196., 196., 236., 235., 229., 243., 264., + 272., 237., 211., 180., 201., 204., 188., 235., 227., 234., 264., + 302., 293., 259., 229., 203., 229., 242., 233., 267., 269., 270., + 315., 364., 347., 312., 274., 237., 278., 284., 277., 317., 313., + 318., 374., 413., 405., 355., 306., 271., 306., 315., 301., 356., + 348., 355., 422., 465., 467., 404., 347., 305., 336., 340., 318., + 362., 348., 363., 435., 491., 505., 404., 359., 310., 337., 360., + 342., 406., 396., 420., 472., 548., 559., 463., 407., 362., 405., + 417., 391., 419., 461., 472., 535., 622., 606., 508., 461., 390., + 432.], dtype=np.float32) + # assert seq_number.shape == (144, ) + # plt.plot(seq_number) + # plt.ion() + # plt.pause(1) + seq_number = seq_number[:, np.newaxis] + + # print(repr(seq)) + # 1949~1960, 12 years, 12*12==144 month + seq_year = np.arange(12) + seq_month = np.arange(12) + seq_year_month = np.transpose( + [np.repeat(seq_year, len(seq_month)), + np.tile(seq_month, len(seq_year))], + ) # Cartesian Product + + seq = np.concatenate((seq_number, seq_year_month), axis=1) + + # normalization + seq = (seq - 
seq.mean(axis=0)) / seq.std(axis=0) + return seq + + +if __name__ == '__main__': + run_train_lstm(X) + # run_train_gru() + # run_origin() + + + diff --git a/nn_ZSH/DNN.py b/nn_ZSH/DNN.py new file mode 100644 index 0000000..b58d1eb --- /dev/null +++ b/nn_ZSH/DNN.py @@ -0,0 +1,117 @@ +import itertools +from sklearn.model_selection import train_test_split +import pandas as pd +import numpy as np +import matplotlib.pyplot as plt +from keras import layers, models + + +def get_state_vect_cols(prefix=''): + if prefix: + prefix += '_' + vectors = ['r', 'v'] + components = ['x', 'y', 'z'] + col_names = [f'{prefix}{v}_{c}' + for v, c + in itertools.product(vectors, components)] + return col_names + + +def build_train_test_sets(df, test_size=0.2): + # Features are the physics predicted state vectors and the amount of + # time in seconds into the future the prediction was made + feature_cols = ['elapsed_seconds'] + get_state_vect_cols('physics_pred') \ + + get_state_vect_cols('start') + # The target values are the errors between the physical model predictions + # and the ground truth observations + target_cols = get_state_vect_cols('physics_err') + # Create feature and target matrices + X = df[feature_cols] + y = df[target_cols] + # Split feature and target data into training and test sets + data_keys = ['X_train', 'X_test', 'y_train', 'y_test'] + data_vals = train_test_split(X, y, test_size=test_size) + train_test_data = dict(zip(data_keys, data_vals)) + return train_test_data + + +def get_data(file_path): + print('Loading physical model orbit prediction training data...') + physics_pred_df = pd.read_parquet(file_path) + print('Building training and test sets...') + train_test_data = build_train_test_sets(physics_pred_df) + x_train = train_test_data['X_train'] + x_test = train_test_data['X_test'] + y_train = train_test_data['y_train'] + y_test = train_test_data['y_test'] + return x_train, y_train, x_test, y_test + + +# 模型定义 +def build_model(): + network = models.Sequential() + network.add(layers.Dense(64, activation='relu', input_shape=(13, ))) + network.add(layers.Dense(64, activation='relu')) + network.add(layers.Dense(1)) # 最后输出预测值,恒等函数 + #损失函数用mes(均方误差), 监控指标为mae(平均绝对误差, 返回误差绝对值) + network.compile(optimizer='rmsprop', loss='mse', metrics=['mae']) + return network + + +file_path = r"ssa/traindata/physics_preds.parquet" +train_data, train_labels, test_data, test_labels = get_data(file_path) + +# # 数据标准化,减去平均值再除以标准差(测试数据也用训练数据的标准差) +# mean = train_data.mean(axis=0) +# train_data -= mean +# std = train_data.std(axis=0) +# train_data /= std +# test_data -= mean +# test_data /= std + +x_train = np.array(train_data) +print(x_train.shape) +x_test = np.array(test_data) +print(x_test.shape) + +features = ['physics_err_r_x', 'physics_err_r_y', 'physics_err_r_z', + 'physics_err_v_x', 'physics_err_v_y', 'physics_err_v_z'] +ave_r2 = 0. 
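A minimal sketch (not part of the diff, with made-up numbers) showing that the manual R^2 used in the evaluation loop below, 1 - mse / np.var(y_test), agrees with sklearn's r2_score: both divide the residual sum of squares by the population variance of the true values.

import numpy as np
from sklearn.metrics import r2_score

y_true = np.array([1.0, 2.0, 3.0, 4.0])
y_pred = np.array([1.1, 1.9, 3.2, 3.8])
mse = np.mean((y_true - y_pred) ** 2)
print(1 - mse / np.var(y_true))   # 0.98
print(r2_score(y_true, y_pred))   # 0.98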
+for i in range(6): + y_train = np.array(train_labels[features[i]]) + print(y_train.shape) + y_test = np.array(test_labels[features[i]]) + print(y_test.shape) + + network = build_model() + network.summary() + History = network.fit(x_train, y_train, epochs=100, batch_size=1) + + network.save('models/DNN{0}.h5'.format(i+1)) + # 用训练好的模型衡量测试数据精确度 + mse, mae = network.evaluate(x_test, y_test) + rmse = mse**0.5 + r2 = 1-mse / np.var(y_test) + print(features[i], ": mse:", mse, " rmse:", + rmse, " mae:", mae, " r2:", r2) + ave_r2 += r2 + + # #用训练好的网络预测结果 + # y_p = network.predict(x_test) + + # 绘制图像 + history_dict = History.history + print(history_dict.keys()) + metric_list = history_dict['mae'] + + x = range(1, len(metric_list) + 1) + + plt.figure(i) + plt.plot(x, metric_list) + plt.title('Training_mae') + plt.xlabel('Epoches') + plt.ylabel('mean abs error') + plt.legend() + plt.show() + +print('ave_r2: ', ave_r2/6) diff --git a/regressors/__pycache__/linear_study.cpython-38.pyc b/regressors/__pycache__/linear_study.cpython-38.pyc new file mode 100644 index 0000000..293b995 Binary files /dev/null and b/regressors/__pycache__/linear_study.cpython-38.pyc differ diff --git a/regressors/__pycache__/linear_sum_regressor.cpython-38.pyc b/regressors/__pycache__/linear_sum_regressor.cpython-38.pyc index 6dde935..8c35488 100644 Binary files a/regressors/__pycache__/linear_sum_regressor.cpython-38.pyc and b/regressors/__pycache__/linear_sum_regressor.cpython-38.pyc differ diff --git a/regressors/__pycache__/nn_use.cpython-38.pyc b/regressors/__pycache__/nn_use.cpython-38.pyc new file mode 100644 index 0000000..73df1f4 Binary files /dev/null and b/regressors/__pycache__/nn_use.cpython-38.pyc differ diff --git a/regressors/__pycache__/normal_use.cpython-38.pyc b/regressors/__pycache__/normal_use.cpython-38.pyc index 3a310df..2d1ec34 100644 Binary files a/regressors/__pycache__/normal_use.cpython-38.pyc and b/regressors/__pycache__/normal_use.cpython-38.pyc differ diff --git a/regressors/__pycache__/one_nn.cpython-38.pyc b/regressors/__pycache__/one_nn.cpython-38.pyc new file mode 100644 index 0000000..d8ddec2 Binary files /dev/null and b/regressors/__pycache__/one_nn.cpython-38.pyc differ diff --git a/regressors/__pycache__/one_regressor.cpython-38.pyc b/regressors/__pycache__/one_regressor.cpython-38.pyc index 64ec046..46ba205 100644 Binary files a/regressors/__pycache__/one_regressor.cpython-38.pyc and b/regressors/__pycache__/one_regressor.cpython-38.pyc differ diff --git a/regressors/linear_study.py b/regressors/linear_study.py index b949495..5ec2596 100644 --- a/regressors/linear_study.py +++ b/regressors/linear_study.py @@ -1 +1,113 @@ -# wait for tommorow! 
\ No newline at end of file +from scipy.sparse.construct import random +from normal_use import * + + +sumRegressors = [LGBMRegressor, RandomForestRegressor, XGBRegressor, CatBoostRegressor] +sumRegressor = Union[type(sumRegressors)] +sumParams = [{},{},{},{"silent": True}] +weight = [0.1, 0.2, 0.3, 0.4] +Sums = {} +train_test_data = None +out_weights = [] + + +oof_train = {} +oof_test = {} +# Some changes +# LinearRegression, Ridge, XGBRegressor, CatBoostRegressor, LGBMRegressor +# deepforest.CascadeForestRegressor +REG_TOTAL = Ridge + + +def get_random_small_train(X, y, Percentage = 0.8, seed = 0): + # return X_train & y_train + data_keys = ['X_train', 'X_test', 'y_train', 'y_test'] + data_vals = train_test_split(X, y, random_state=seed,test_size=(1-Percentage)) + train_test_data = dict(zip(data_keys, data_vals)) + return train_test_data['X_train'], train_test_data['y_train'] + + +def train_one_regressor(id, regType: sumRegressor, use_RFsample = False, seed = 0): + full_X, full_ys = train_test_data['X_train'], train_test_data['y_train'] + tX, tys = train_test_data['X_test'], train_test_data['y_test'] + X, ys = full_X, full_ys + if use_RFsample: + X, ys = get_random_small_train(X, ys, seed=seed) + # which xxx_moon? + # make_moons(n_samples=100, shuffle=True, noise=None, random_state=None) + # pass + check_X_y(X, ys, multi_output=True) + models = {} + evals = [] + for target_col in ys.columns: + y = ys[target_col] + reg = regType(**sumParams[id]) + reg.fit(X, y) + models[target_col] = reg + # test in full train_test + y_hat = reg.predict(full_X) + oof_train[target_col].append(y_hat.reshape(-1, 1)) + # predict test + ty_hat = reg.predict(tX) + oof_test[target_col].append(ty_hat.reshape(-1, 1)) + ty = tys[target_col] + # one evals + rmse = metrics.mean_squared_error(ty, ty_hat, squared=False) + r2 = metrics.r2_score(ty, ty_hat) + eval_dict = {'Error': target_col, 'RMSE': rmse, 'R^2': r2} + evals.append(eval_dict) + print(regType.__name__) + print(pd.DataFrame(evals)) + print("Average R2: ", average_R2(evals)) + + joblib.dump(models, f"linear/{regType.__name__}_study_{id}.model") + +def train_linear_sumer(): + ys = train_test_data['y_train'] + tys = train_test_data['y_test'] # real + evals = [] + for target_col in oof_train: + X = np.hstack(oof_train[target_col]) + tX = np.hstack(oof_test[target_col]) + print(ys.shape,X.shape,tys.shape,tX.shape) + y = ys[target_col] + ty = tys[target_col] + clf = REG_TOTAL() + clf.fit(X, y) + ty_hat = clf.predict(tX) # fake + rmse = metrics.mean_squared_error(ty, ty_hat, squared=False) + r2 = metrics.r2_score(ty, ty_hat) + eval_dict = {'Error': target_col, 'RMSE': rmse, 'R^2': r2} + evals.append(eval_dict) + + print("linear *study* for {} regressors!".format(len(sumRegressors))) + print(pd.DataFrame(evals)) + print("Average R2: ", average_R2(evals)) + +def study_linear(trainset): + """ + Description + ----------- + create a linear combination, weight and regressors here to change + + Parameters + ---------- + trainset : dict + train_data_set + + Returns + ------- + print result on screen + """ + global train_test_data + train_test_data = trainset + + for target_col in train_test_data['y_train'].columns: + oof_train[target_col] = [] + oof_test[target_col] = [] + + for i, reg in enumerate(sumRegressors): + train_one_regressor(i, reg, use_RFsample=True, seed=1024) + + train_linear_sumer() + diff --git a/regressors/linear_sum_regressor.py b/regressors/linear_sum_regressor.py index 635aa36..a5f0699 100644 --- a/regressors/linear_sum_regressor.py +++ 
@@ -20,7 +20,7 @@ def train_linear(id, regType: sumRegressor, use_RFsample = False):
     X, ys = train_test_data['X_train'], train_test_data['y_train']
     if use_RFsample:
         # X, ys = get_random_small_train(X, ys)
-        # which xxx_moon?
+        # make_moons(n_samples=100, shuffle=True, noise=None, random_state=None)
         pass
     check_X_y(X, ys, multi_output=True)
     models = {}
diff --git a/regressors/nn_use.py b/regressors/nn_use.py
new file mode 100644
index 0000000..cce7f4b
--- /dev/null
+++ b/regressors/nn_use.py
@@ -0,0 +1,203 @@
+# FNN
+
+import torch
+import torch.nn as nn
+import torchvision.transforms as transforms
+import torchvision.datasets as dsets
+import pandas as pd
+from normal_use import *
+
+device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+
+class FNN_Net(nn.Module):
+
+    def __init__(self):
+        super(FNN_Net, self).__init__()
+        self.features = 0
+        self.linear_relu1 = nn.Linear(self.features, 128)  # re-created in fit() once the real input size is known
+        self.linear_relu2 = nn.Linear(128, 256)
+        self.linear_relu3 = nn.Linear(256, 256)
+        self.linear_relu4 = nn.Linear(256, 256)
+        self.linear5 = nn.Linear(256, 1)
+
+    def forward(self, x):
+        y_pred = self.linear_relu1(x)
+        y_pred = nn.functional.relu(y_pred)
+
+        y_pred = self.linear_relu2(y_pred)
+        y_pred = nn.functional.relu(y_pred)
+
+        y_pred = self.linear_relu3(y_pred)
+        y_pred = nn.functional.relu(y_pred)
+
+        y_pred = self.linear_relu4(y_pred)
+        y_pred = nn.functional.relu(y_pred)
+
+        y_pred = self.linear5(y_pred)
+        return y_pred
+
+    def fit(self, X, y_all):
+        """
+        Description
+        -----------
+        Train the network on the given training features and targets.
+
+        Parameters
+        ----------
+        X - train data X
+        y_all - train data ys
+
+        """
+
+        y = y_all
+        # Training-set features
+        train_features = torch.tensor(X.values, dtype=torch.float)
+        train_features = train_features.to(device)
+        # Training-set targets
+        train_labels = torch.tensor(y.values, dtype=torch.float).view(-1, 1)
+        train_labels = train_labels.to(device)
+
+        self.features = train_features.shape[1]
+        self.linear_relu1 = nn.Linear(self.features, 128)
+        self = self.to(device)
+        self.train()
+
+        criterion = nn.MSELoss(reduction='mean')
+        criterion = criterion.to(device)
+        optimizer = torch.optim.Adam(self.parameters(), lr=1e-4)
+
+        losses = []
+        epoch = 5000
+        epoch_real = 0
+        for t in range(epoch):
+            y_pred = self(train_features)
+            y_pred.to(device)
+
+            loss = criterion(y_pred, train_labels)
+            losses.append(loss.item())
+
+            if torch.isnan(loss):
+                break
+
+            optimizer.zero_grad()
+
+            loss.backward()
+
+            optimizer.step()
+            epoch_real = epoch_real + 1
+
+        print(f"epoch:{epoch_real} loss:{losses[-1]}")
+
+
+    def predict(self, X):
+        self.to(device)
+        test_features = torch.tensor(X.values, dtype=torch.float)
+        test_features = test_features.to(device)
+        pred_labels = self(test_features)
+        pred_labels = pred_labels.cpu().data.numpy()
+        return pred_labels
+
+
+# # DNN equal next NN
+# import itertools
+# from sklearn.model_selection import train_test_split
+# import pandas as pd
+# import numpy as np
+# from keras import layers, models
+
+# # Model definition
+
+# class DNN_Net():
+#     def build_model(self):
+#         network = models.Sequential()
+#         network.add(layers.Dense(64, activation='relu', input_shape=(13, )))
+#         network.add(layers.Dense(64, activation='relu'))
+#         network.add(layers.Dense(1))  # final output is the predicted value (identity activation)
+#         # loss is MSE (mean squared error); monitored metric is MAE (mean absolute error)
+#         network.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])
+#         return network
+
+#     def __init__(self):
+#         self.network = self.build_model()
+
+
+#     def fit(self, X, ys):
+#         self.network.summary()
+#         self.network.fit(X, ys, epochs=1, batch_size=1)
+
+#     def predict(self, X):
+#         res = self.network.predict(X)
+#         print(res)
+#         return res
+
+
+# NN
+
+import torch
+import torch.nn as nn
+from torch.utils.data import TensorDataset, DataLoader
+from sklearn import metrics
+
+class NN_Net(nn.Module):
+    def __init__(self):
+        super(NN_Net, self).__init__()
+        self.linear_relu1 = nn.Linear(13, 64)
+        self.relu6 = nn.ReLU6()
+        # self.relu6 = nn.ReLU()
+        self.linear_relu2 = nn.Linear(64, 64)
+        self.leaky = nn.LeakyReLU()
+        # self.leaky = nn.ReLU()
+        self.linear3 = nn.Linear(64, 1)
+
+    def forward(self, x):
+        y_pred = self.linear_relu1(x)
+        y_pred = self.relu6(y_pred)
+
+        y_pred = self.linear_relu2(y_pred)
+        y_pred = self.leaky(y_pred)
+
+        y_pred = self.linear3(y_pred)
+        return y_pred
+
+    def fit(self, X, y_all):
+        """
+        Description
+        -----------
+        Train the network on the given training features and targets.
+
+        Parameters
+        ----------
+        X - train data X
+        y_all - train data ys
+
+        """
+        # Training-set features
+        train_features = torch.tensor(X.values, dtype=torch.float)
+        train_features = train_features.to(device)
+        # Training-set targets
+        train_labels = torch.tensor(y_all.values, dtype=torch.float).view(-1, 1)
+        train_labels = train_labels.to(device)
+
+        self = self.to(device)
+        # self.train()
+
+        opti = torch.optim.SGD(self.parameters(), lr=0.02)
+        loss_func = nn.MSELoss()
+        train_dataloader = DataLoader(TensorDataset(train_features, train_labels), batch_size=320)
+        for t in range(1000):
+            for batch, (x, y) in enumerate(train_dataloader):
+                pred = self(x)
+                loss = loss_func(pred, y)
+                opti.zero_grad()
+                torch.sqrt(loss).backward()
+                opti.step()
+            print(t)
+
+
+    def predict(self, X):
+        self.to(device)
+        test_features = torch.tensor(X.values, dtype=torch.float)
+        test_features = test_features.to(device)
+        pred_labels = self(test_features)
+        pred_labels = pred_labels.cpu().data.numpy()
+        return pred_labels
diff --git a/regressors/normal_use.py b/regressors/normal_use.py
index bc0a1f9..5677c7c 100644
--- a/regressors/normal_use.py
+++ b/regressors/normal_use.py
@@ -13,8 +13,11 @@ import pandas as pd
 from typing import Union
 from sklearn import metrics
 from sklearn.model_selection import train_test_split
+from sklearn.datasets import make_moons
 from sklearn.utils.validation import check_X_y
 import joblib
+from sklearn.metrics import r2_score
+
 
 def average_R2(evals):
     sum = 0
diff --git a/regressors/one_nn.py b/regressors/one_nn.py
new file mode 100644
index 0000000..ab1b568
--- /dev/null
+++ b/regressors/one_nn.py
@@ -0,0 +1,67 @@
+import keras
+from normal_use import *
+from nn_use import FNN_Net, NN_Net
+
+
+Regressors = [NN_Net]
+# Params = ['','']
+Regressor = Union[FNN_Net, NN_Net]  # one of the network regressor classes
+train_test_data = None
+
+
+def train_model(id, regType: Regressor):
+    X, ys = train_test_data['X_train'], train_test_data['y_train']
+    check_X_y(X, ys, multi_output=True)
+    models = {}
+    for target_col in ys.columns:
+        y = ys[target_col]
+        reg = regType()
+        reg.fit(X, y)
+        models[target_col] = reg
+        print(regType.__name__, target_col)
+    joblib.dump(models, f"nn_models/{regType.__name__}.model")
+    # keras.models.save_model(models, f"nn_models/{regType.__name__}.model")
+
+
+def eval_model(regType: Regressor):
+    models = joblib.load(f"nn_models/{regType.__name__}.model")
+    X, ys = train_test_data['X_test'], train_test_data['y_test']
+    evals = []
+    for target_col, reg in models.items():
+        y_hat = reg.predict(X)  # predicted
+        y = ys[target_col]  # ground truth
+        rmse = metrics.mean_squared_error(y, y_hat, squared=False)
+        r2 = metrics.r2_score(y, y_hat)
+        eval_dict = {'Error': target_col, 'RMSE': rmse, 'R^2': r2}
+        evals.append(eval_dict)
+    print(regType.__name__)
+    print(pd.DataFrame(evals))
+    print("Average R2: ", average_R2(evals))
+
+
+def train_one_models(trainsets):
+    """
+    Description
+    -----------
+    Call this to train and evaluate each neural-network regressor.
+
+    Parameters
+    ----------
+    trainsets : dict
+        the train/test dictionary loaded with joblib from "create_datas"
+
+    Returns
+    -------
+    Nothing is returned; trained models are saved to the "nn_models" folder and R^2 is printed on screen
+
+    """
+    global train_test_data
+    train_test_data = trainsets
+
+    for i, reg in enumerate(Regressors):
+        train_model(i, reg)
+
+    for reg in Regressors:
+        eval_model(reg)
+
+    
\ No newline at end of file
diff --git a/test_full.py b/test_full.py
index ab8312e..c018498 100644
--- a/test_full.py
+++ b/test_full.py
@@ -7,11 +7,17 @@ import create_traindata
 import one_regressor
 import stacking_regressor
 import linear_sum_regressor
+import linear_study
+import one_nn
 
 # %%
 # create train data
 seed = 514
 create_traindata.create_train_data(seed=seed)
+
+# %%
+# load data
+seed = 514
 train_test_data = joblib.load(f"create_datas/seed_{seed}.td")
 
 # %%
@@ -24,4 +30,14 @@ stacking_regressor.stacking_train(train_test_data)
 
 # %%
 # test linear combination
-linear_sum_regressor.only_linear(train_test_data)
\ No newline at end of file
+linear_sum_regressor.only_linear(train_test_data)
+
+# %%
+# test linear study method
+linear_study.study_linear(train_test_data)
+
+# %%
+# seed = 514
+# train_test_data = joblib.load(f"create_datas/seed_{seed}.td")
+one_nn.train_one_models(train_test_data)
+# %%
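
For reference, a minimal sketch (not part of the patch) of the two-level scheme regressors/linear_study.py implements: each base regressor is fit per target, its predictions are collected as columns, and a Ridge meta-model (REG_TOTAL) learns the linear combination. The sketch uses only scikit-learn estimators and a synthetic data set as stand-ins for the LightGBM/XGBoost/CatBoost base models and the real error data; like the original, the meta-model is fit on in-sample base predictions.

import numpy as np
from sklearn.datasets import make_regression
from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
from sklearn.linear_model import Ridge
from sklearn.metrics import r2_score
from sklearn.model_selection import train_test_split

# Synthetic stand-in for the orbit-error training data (13 features, 1 target)
X, y = make_regression(n_samples=500, n_features=13, noise=0.1, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Level 0: fit each base regressor and keep its predictions as one column
base_models = [RandomForestRegressor(random_state=0),
               GradientBoostingRegressor(random_state=0)]
train_preds, test_preds = [], []
for model in base_models:
    model.fit(X_train, y_train)
    train_preds.append(model.predict(X_train).reshape(-1, 1))
    test_preds.append(model.predict(X_test).reshape(-1, 1))

# Level 1: a Ridge model learns how to weight the base predictions
meta = Ridge()
meta.fit(np.hstack(train_preds), y_train)
y_hat = meta.predict(np.hstack(test_preds))
print("stacked R^2:", r2_score(y_test, y_hat))

Fitting the meta-model on true out-of-fold predictions (e.g. via sklearn.model_selection.cross_val_predict) instead of in-sample predictions would reduce the leakage this shortcut introduces.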