langtrain provides automated hyperparameter optimization for Large Language Models, using advanced search algorithms and neural architecture search techniques.
To get started, create a model with `auto_tune` enabled, point an `AutoTuner` at your dataset, and run the optimization:

```python
import langtrain

# Create model with auto-tuning enabled
model = langtrain.Model.create(
    name="auto-tuned-classifier",
    architecture="bert-base-uncased",
    task="classification",
    auto_tune=True  # Enable auto-tuning
)

# Load your dataset
dataset = langtrain.Dataset.from_csv("data.csv")

# Start auto-tuning
tuner = langtrain.AutoTuner(
    model=model,
    dataset=dataset,
    max_trials=50,        # Number of configurations to try
    max_epochs=10,        # Maximum epochs per trial
    objective="f1_score"  # Metric to optimize
)

# Run optimization
best_config = tuner.optimize()
print(f"Best configuration: {best_config}")
print(f"Best score: {tuner.best_score}")
```
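Under the hood, each trial amounts to sampling one configuration, training it for at most `max_epochs`, scoring it on the chosen objective, and keeping the best result. The sketch below illustrates that loop in plain Python as a simple random search; it is not langtrain internals, and `sample_config`, `train_and_evaluate`, and the example search space are hypothetical placeholders.

```python
import random

# Hypothetical search space standing in for what the tuner explores
search_space = {
    "learning_rate": [1e-5, 3e-5, 5e-5],
    "batch_size": [16, 32],
}

def sample_config(space):
    # Random search: pick one value per hyperparameter at random
    return {name: random.choice(values) for name, values in space.items()}

def train_and_evaluate(config):
    # Placeholder: train briefly and return the objective (e.g. F1 score)
    return random.random()

best_config, best_score = None, float("-inf")
for trial in range(50):  # corresponds to max_trials=50
    config = sample_config(search_space)
    score = train_and_evaluate(config)
    if score > best_score:
        best_config, best_score = config, score

print(f"Best configuration: {best_config}")
print(f"Best score: {best_score:.3f}")
```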
For finer control, define a custom search space for the hyperparameters you care about and choose the optimization algorithm explicitly:

```python
# Define custom hyperparameter search space
search_space = {
    'learning_rate': langtrain.hp.loguniform(1e-6, 1e-3),
    'batch_size': langtrain.hp.choice([8, 16, 32, 64]),
    'dropout_rate': langtrain.hp.uniform(0.1, 0.5),
    'weight_decay': langtrain.hp.loguniform(1e-6, 1e-2),
    'warmup_ratio': langtrain.hp.uniform(0.0, 0.2),
    'optimizer': langtrain.hp.choice(['adam', 'adamw', 'sgd'])
}

# Configure auto-tuner with custom search space
tuner = langtrain.AutoTuner(
    model=model,
    dataset=dataset,
    search_space=search_space,
    algorithm="bayesian",  # Optimization algorithm
    max_trials=100,
    timeout=3600  # 1 hour timeout
)

# Run with early stopping
best_config = tuner.optimize(
    early_stopping_patience=10,
    min_improvement=0.001
)
```
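A note on the three distribution helpers: `uniform(a, b)` draws values evenly between `a` and `b`, `loguniform(a, b)` draws them evenly in log space (appropriate for learning rate and weight decay, which span several orders of magnitude), and `choice([...])` picks one of the listed options. The NumPy snippet below sketches equivalent draws purely as an illustration of those distributions; it is not langtrain code.

```python
import numpy as np

rng = np.random.default_rng(seed=0)

def loguniform(low, high):
    # Sample uniformly in log space, then map back with exp
    return float(np.exp(rng.uniform(np.log(low), np.log(high))))

# One random draw from each entry of the search space above
sample = {
    "learning_rate": loguniform(1e-6, 1e-3),
    "batch_size": int(rng.choice([8, 16, 32, 64])),
    "dropout_rate": float(rng.uniform(0.1, 0.5)),
    "weight_decay": loguniform(1e-6, 1e-2),
    "warmup_ratio": float(rng.uniform(0.0, 0.2)),
    "optimizer": str(rng.choice(["adam", "adamw", "sgd"])),
}
print(sample)
```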
When a single metric is not enough, optimize several objectives at once and pick a trade-off from the resulting Pareto front:

```python
# Optimize for multiple objectives
objectives = {
    'accuracy': 'maximize',
    'inference_time': 'minimize',
    'model_size': 'minimize'
}

tuner = langtrain.MultiObjectiveTuner(
    model=model,
    dataset=dataset,
    objectives=objectives,
    max_trials=200
)

# Get Pareto-optimal solutions
pareto_solutions = tuner.optimize()

# Select best trade-off based on your priorities
best_config = tuner.select_best(
    weights={'accuracy': 0.7, 'inference_time': 0.2, 'model_size': 0.1}
)
```
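A trial is Pareto-optimal when no other trial is at least as good on every objective and strictly better on at least one, so the tuner returns a set of non-dominated configurations rather than a single winner; the weighted selection step then collapses that set to one choice. The sketch below shows both steps on hand-written trial records; the numbers are made up, and in practice you would normalize each objective before mixing them in a weighted sum.

```python
# Hypothetical trial results: accuracy should go up, time and size down
trials = [
    {"accuracy": 0.91, "inference_time": 12.0, "model_size": 440},
    {"accuracy": 0.89, "inference_time": 7.5,  "model_size": 260},
    {"accuracy": 0.90, "inference_time": 13.0, "model_size": 450},  # dominated by the first
]
objectives = {"accuracy": "maximize", "inference_time": "minimize", "model_size": "minimize"}

def dominates(a, b):
    # a dominates b if it is no worse on every objective and better on at least one
    no_worse = all(a[k] >= b[k] if d == "maximize" else a[k] <= b[k]
                   for k, d in objectives.items())
    better = any(a[k] > b[k] if d == "maximize" else a[k] < b[k]
                 for k, d in objectives.items())
    return no_worse and better

pareto_front = [t for t in trials if not any(dominates(o, t) for o in trials)]

# Collapse the front with a weighted score (higher combined score wins)
weights = {"accuracy": 0.7, "inference_time": 0.2, "model_size": 0.1}
def weighted_score(t):
    return sum(w * (t[k] if objectives[k] == "maximize" else -t[k])
               for k, w in weights.items())

best = max(pareto_front, key=weighted_score)
print(pareto_front)
print(best)
```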
For long training runs, population-based training adjusts hyperparameters while training is in progress instead of restarting from scratch for every configuration:

```python
# Use population-based training for dynamic optimization
pbt_config = langtrain.PBTConfig(
    population_size=20,         # Number of parallel training runs
    perturbation_interval=5,    # Epochs between perturbations
    mutation_rate=0.2,          # Probability of parameter mutation
    truncation_percentage=0.2   # Bottom 20% get replaced
)

tuner = langtrain.PopulationBasedTuner(
    model=model,
    dataset=dataset,
    config=pbt_config,
    total_epochs=50
)

# This will train multiple models simultaneously
# and evolve their hyperparameters over time
results = tuner.train_population()

# Get the best performing model
best_model = results.best_model
best_hyperparams = results.best_hyperparams
```
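At every perturbation interval, population-based training replaces the weakest members of the population: each member in the bottom `truncation_percentage` copies the weights and hyperparameters of a top performer (exploit) and then randomly perturbs some of those hyperparameters (explore) before all members continue training. A minimal sketch of that exploit/explore step, with made-up member records and a simple multiplicative perturbation, might look like this:

```python
import copy
import random

# One record per population member: current score and hyperparameters
population = [
    {"score": random.random(),
     "hparams": {"learning_rate": 3e-5, "dropout_rate": 0.2}}
    for _ in range(20)  # population_size=20
]

def exploit_and_explore(population, truncation=0.2, mutation_rate=0.2):
    ranked = sorted(population, key=lambda m: m["score"], reverse=True)
    cutoff = max(1, int(len(ranked) * truncation))
    top, bottom = ranked[:cutoff], ranked[-cutoff:]
    for member in bottom:
        donor = random.choice(top)
        member["hparams"] = copy.deepcopy(donor["hparams"])  # exploit: copy a top performer
        for name in member["hparams"]:
            if random.random() < mutation_rate:               # explore: perturb some values
                member["hparams"][name] *= random.choice([0.8, 1.2])
    return population

population = exploit_and_explore(population)
```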