Skip to content

Commit

Permalink
Disable verbose param for multiprocessing
Browse files Browse the repository at this point in the history
  • Loading branch information
roquelopez committed Apr 30, 2024
1 parent a6871be commit 6bab136
Show file tree
Hide file tree
Showing 5 changed files with 11 additions and 40 deletions.
10 changes: 5 additions & 5 deletions alpha_automl/automl_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,10 +63,10 @@ def __init__(self, time_bound=15, metric=None, split_strategy='holdout', time_bo
self.y = None
self.leaderboard = None
self.automl_manager = AutoMLManager(self.output_folder, checkpoints_folder, time_bound, time_bound_run, task, num_cpus, verbose)
self._start_method = get_start_method(start_mode)
set_start_method(self._start_method, force=True)
check_input_for_multiprocessing(self._start_method, self.scorer._score_func, 'metric')
check_input_for_multiprocessing(self._start_method, self.splitter, 'split strategy')
#self._start_method = get_start_method(start_mode)
#set_start_method(self._start_method, force=True)
#check_input_for_multiprocessing(self._start_method, self.scorer._score_func, 'metric')
#check_input_for_multiprocessing(self._start_method, self.splitter, 'split strategy')
self.label_encoder = None
self.task_type = task

Expand Down Expand Up @@ -196,7 +196,7 @@ def add_primitives(self, new_primitives):
:param new_primitives: Set of new primitives, tuples of name and object primitive
"""
for primitive_object, primitive_type in new_primitives:
check_input_for_multiprocessing(self._start_method, primitive_object, 'primitive')
#check_input_for_multiprocessing(self._start_method, primitive_object, 'primitive')
primitive_name = f'{primitive_object.__module__}.{primitive_object.__class__.__name__}'
primitive_name = primitive_name.replace('__', '') # Sklearn restriction on estimator names
self.new_primitives[primitive_name] = {'primitive_object': primitive_object,
Expand Down
6 changes: 2 additions & 4 deletions alpha_automl/automl_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def _search_pipelines(self, automl_hyperparams):

pipelines = search_pipelines_proc(X, y, self.scoring, internal_splitting_strategy, self.task,
self.time_bound, automl_hyperparams, metadata, self.output_folder,
self.checkpoints_folder, self.verbose)
self.checkpoints_folder)

found_pipelines = 0

Expand All @@ -72,9 +72,7 @@ def _search_pipelines(self, automl_hyperparams):
while pipelines and found_pipelines < pipeline_threshold:
pipeline = pipelines.pop()
try:
alphaautoml_pipeline = score_pipeline(pipeline, X, y, self.scoring,
self.splitting_strategy, self.task,
self.verbose)
alphaautoml_pipeline = score_pipeline(pipeline, X, y, self.scoring, self.splitting_strategy, self.task)

if alphaautoml_pipeline is not None:
score = alphaautoml_pipeline.get_score()
Expand Down
2 changes: 1 addition & 1 deletion alpha_automl/pipeline_search/agent_lab.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def pipeline_search_rllib(game, time_bound, checkpoint_load_folder, checkpoint_s
"""
Search for pipelines using Rllib
"""
ray.init(local_mode=True, num_cpus=8)
ray.init(local_mode=True, num_cpus=8, logging_level=logging.CRITICAL, log_to_driver=False)
num_cpus = int(ray.available_resources()["CPU"])

# load checkpoint or create a new one
Expand Down
30 changes: 2 additions & 28 deletions alpha_automl/pipeline_synthesis/setup_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,31 +31,7 @@
}


def signal_handler(queue, signum):
    """Mark the shared queue as finished and terminate the current process.

    Invoked when a termination signal is delivered: appends a sentinel so
    consumers know no more work is coming, then exits cleanly.
    """
    logger.debug(f"Receiving signal {signum}, terminating process")
    # TODO: Should it save the last status of the NN model?
    queue.append("DONE")
    sys.exit(0)


def search_pipelines(
X,
y,
scoring,
splitting_strategy,
task_name,
time_bound,
automl_hyperparams,
metadata,
output_folder,
checkpoints_folder,
verbose,
):
# signal.signal(signal.SIGTERM, lambda signum, frame: signal_handler(queue, signum))
hide_logs(
verbose
) # Hide logs here too, since multiprocessing has some issues with loggers

def search_pipelines(X, y, scoring, splitting_strategy, task_name, time_bound, automl_hyperparams, metadata, output_folder, checkpoints_folder):
builder = BaseBuilder(metadata, automl_hyperparams)
all_primitives = builder.all_primitives
ensemble_pipelines_hash = set()
Expand All @@ -75,9 +51,7 @@ def evaluate_pipeline(primitives):
score = None

if pipeline is not None:
alphaautoml_pipeline = score_pipeline(
pipeline, X, y, scoring, splitting_strategy, task_name, verbose
)
alphaautoml_pipeline = score_pipeline(pipeline, X, y, scoring, splitting_strategy, task_name)
if alphaautoml_pipeline is not None:
score = alphaautoml_pipeline.get_score()
if score is not None:
Expand Down
3 changes: 1 addition & 2 deletions alpha_automl/scorer.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,8 +121,7 @@ def make_splitter(splitting_strategy, splitting_strategy_kwargs=None):
f'instance of BaseCrossValidator, BaseShuffleSplit, RepeatedSplits.')


def score_pipeline(pipeline, X, y, scoring, splitting_strategy, task_name, verbose):
hide_logs(verbose) # Hide logs here too, since multiprocessing has some issues with loggers
def score_pipeline(pipeline, X, y, scoring, splitting_strategy, task_name):
score = None
start_time = None
end_time = None
Expand Down

0 comments on commit 6bab136

Please sign in to comment.