
    ,YHht                    P   d dl mZ d dlmZ d dlmZ d dlZd dlZd dlm	Z	 d dlm
Z
 d dlZd dlZd dlmZ d dlmZ d d	lmZ d d
lmZ d dlmZ d dlmZ d dlmZ d dlmZ d dlmZ d dlmZ d dlmZ d dlmZ d dlmZ  ej>                  e       Z!dZ" G d de      Z# G d de      Z$y)    )annotations)UserDict)SequenceN)Any)overload)distributions)logging)pruners)convert_positional_args)deprecated_func)BaseDistribution)CategoricalChoiceType)CategoricalDistribution)FloatDistribution)IntDistribution)FrozenTrial)_SUGGEST_INT_POSITIONAL_ARGS)	BaseTrialz Use suggest_float{args} instead.c                     e Zd ZdZd-dZed.d       Zddd	 	 	 	 	 	 	 	 	 	 	 d/dZ edd	e	j                  d
            d0d       Z edd	e	j                  d            d0d       Z edd	e	j                  d            d1d       Z ee      ddd	 	 	 	 	 	 	 	 	 	 	 d2d       Zed3d       Zed4d       Zed5d       Zed6d       Zed7d       Ze	 	 	 	 	 	 d8d       Z	 	 	 	 	 	 d8dZd9dZd:dZd;dZ edd       d;d!       Zd<d"Zd=d#Zd=d$Zd>d%Zd?d&Zed.d'       Zed@d(       Zed.d)       Ze edd       d.d*              ZedAd+       Z edBd,       Z!y)CTriala  A trial is a process of evaluating an objective function.

    This object is passed to an objective function and provides interfaces to get parameter
    suggestion, manage the trial's state, and set/get user-defined attributes of the trial.

    Note that the direct use of this constructor is not recommended.
    This object is seamlessly instantiated and passed to the objective function behind
    the :func:`optuna.study.Study.optimize()` method; hence library users do not care about
    instantiation of this object.

    Args:
        study:
            A :class:`~optuna.study.Study` object.
        trial_id:
            A trial ID that is automatically generated.

    c                ,   || _         || _        | j                   j                  | _        | j                  j	                  | j                        | _        t        j                  | j                   | j
                        }| j                   j                  j                  || j
                         | j                   j                  j                  || j
                        | _        d | _        | j
                  j                  j                  di       | _        y )Nfixed_params)study	_trial_id_storagestorage	get_trial_cached_frozen_trialr
   _filter_studysamplerbefore_trialinfer_relative_search_spacerelative_search_space_relative_paramssystem_attrsget_fixed_params)selfr   trial_ids      L/var/www/html/planif/env/lib/python3.12/site-packages/optuna/trial/_trial.py__init__zTrial.__init__2   s    
!zz**$(LL$:$:4>>$J!%%djj$2K2KL

''t/H/HI%)ZZ%7%7%S%S4,,&
" 8<!66CCGGXZ[    c                   | j                   jt        j                  | j                  | j                        }| j                  j
                  j                  || j                  | j                        | _         | j                   S N)r$   r
   r   r   r   r    sample_relativer#   )r(   r   s     r*   relative_paramszTrial.relative_paramsC   sh      ())$**d6O6OPE$(JJ$6$6$F$Ft00$2L2L%D! $$$r,   NF)steplogc               l    t        ||||      }| j                  ||      }| j                  ||       |S )a  Suggest a value for the floating point parameter.

        Example:

            Suggest a momentum, learning rate and scaling factor of learning rate
            for neural network training.

            .. testcode::

                import numpy as np
                from sklearn.datasets import load_iris
                from sklearn.model_selection import train_test_split
                from sklearn.neural_network import MLPClassifier

                import optuna

                X, y = load_iris(return_X_y=True)
                X_train, X_valid, y_train, y_valid = train_test_split(X, y, random_state=0)


                def objective(trial):
                    momentum = trial.suggest_float("momentum", 0.0, 1.0)
                    learning_rate_init = trial.suggest_float(
                        "learning_rate_init", 1e-5, 1e-3, log=True
                    )
                    power_t = trial.suggest_float("power_t", 0.2, 0.8, step=0.1)
                    clf = MLPClassifier(
                        hidden_layer_sizes=(100, 50),
                        momentum=momentum,
                        learning_rate_init=learning_rate_init,
                        solver="sgd",
                        random_state=0,
                        power_t=power_t,
                    )
                    clf.fit(X_train, y_train)

                    return clf.score(X_valid, y_valid)


                study = optuna.create_study(direction="maximize")
                study.optimize(objective, n_trials=3)

        Args:
            name:
                A parameter name.
            low:
                Lower endpoint of the range of suggested values. ``low`` is included in the range.
                ``low`` must be less than or equal to ``high``. If ``log`` is :obj:`True`,
                ``low`` must be larger than 0.
            high:
                Upper endpoint of the range of suggested values. ``high`` is included in the range.
                ``high`` must be greater than or equal to ``low``.
            step:
                A step of discretization.

                .. note::
                    The ``step`` and ``log`` arguments cannot be used at the same time. To set
                    the ``step`` argument to a float number, set the ``log`` argument to
                    :obj:`False`.
            log:
                A flag to sample the value from the log domain or not.
                If ``log`` is true, the value is sampled from the range in the log domain.
                Otherwise, the value is sampled from the range in the linear domain.

                .. note::
                    The ``step`` and ``log`` arguments cannot be used at the same time. To set
                    the ``log`` argument to :obj:`True`, set the ``step`` argument to :obj:`None`.

        Returns:
            A suggested float value.

        .. seealso::
            :ref:`configurations` tutorial describes more details and flexible usages.
        )r2   r1   )r   _suggest_check_distributionr(   namelowhighr1   r2   distributionsuggested_values           r*   suggest_floatzTrial.suggest_floatL   s;    h )d$G--l;  |4r,   z3.0.0z6.0.0 )args)textc                (    | j                  |||      S )ar  Suggest a value for the continuous parameter.

        The value is sampled from the range :math:`[\mathsf{low}, \mathsf{high})`
        in the linear domain. When :math:`\mathsf{low} = \mathsf{high}`, the value of
        :math:`\mathsf{low}` will be returned.

        Args:
            name:
                A parameter name.
            low:
                Lower endpoint of the range of suggested values. ``low`` is included in the range.
            high:
                Upper endpoint of the range of suggested values. ``high`` is included in the range.

        Returns:
            A suggested float value.
        r<   r(   r7   r8   r9   s       r*   suggest_uniformzTrial.suggest_uniform   s    ( !!$T22r,   z(..., log=True)c                ,    | j                  |||d      S )ao  Suggest a value for the continuous parameter.

        The value is sampled from the range :math:`[\mathsf{low}, \mathsf{high})`
        in the log domain. When :math:`\mathsf{low} = \mathsf{high}`, the value of
        :math:`\mathsf{low}` will be returned.

        Args:
            name:
                A parameter name.
            low:
                Lower endpoint of the range of suggested values. ``low`` is included in the range.
            high:
                Upper endpoint of the range of suggested values. ``high`` is included in the range.

        Returns:
            A suggested float value.
        T)r2   rA   rB   s       r*   suggest_loguniformzTrial.suggest_loguniform   s    ( !!$Tt!<<r,   z(..., step=...)c                ,    | j                  ||||      S )a  Suggest a value for the discrete parameter.

        The value is sampled from the range :math:`[\mathsf{low}, \mathsf{high}]`,
        and the step of discretization is :math:`q`. More specifically,
        this method returns one of the values in the sequence
        :math:`\mathsf{low}, \mathsf{low} + q, \mathsf{low} + 2 q, \dots,
        \mathsf{low} + k q \le \mathsf{high}`,
        where :math:`k` denotes an integer. Note that :math:`high` may be changed due to round-off
        errors if :math:`q` is not an integer. Please check warning messages to find the changed
        values.

        Args:
            name:
                A parameter name.
            low:
                Lower endpoint of the range of suggested values. ``low`` is included in the range.
            high:
                Upper endpoint of the range of suggested values. ``high`` is included in the range.
            q:
                A step of discretization.

        Returns:
            A suggested float value.
        )r1   rA   )r(   r7   r8   r9   qs        r*   suggest_discrete_uniformzTrial.suggest_discrete_uniform   s    6 !!$T!::r,   )previous_positional_arg_names   c               ~    t        ||||      }t        | j                  ||            }| j                  ||       |S )a  Suggest a value for the integer parameter.

        The value is sampled from the integers in :math:`[\mathsf{low}, \mathsf{high}]`.

        Example:

            Suggest the number of trees in `RandomForestClassifier <https://scikit-learn.org/
            stable/modules/generated/sklearn.ensemble.RandomForestClassifier.html>`__.

            .. testcode::

                import numpy as np
                from sklearn.datasets import load_iris
                from sklearn.ensemble import RandomForestClassifier
                from sklearn.model_selection import train_test_split

                import optuna

                X, y = load_iris(return_X_y=True)
                X_train, X_valid, y_train, y_valid = train_test_split(X, y)


                def objective(trial):
                    n_estimators = trial.suggest_int("n_estimators", 50, 400)
                    clf = RandomForestClassifier(n_estimators=n_estimators, random_state=0)
                    clf.fit(X_train, y_train)
                    return clf.score(X_valid, y_valid)


                study = optuna.create_study(direction="maximize")
                study.optimize(objective, n_trials=3)

        Args:
            name:
                A parameter name.
            low:
                Lower endpoint of the range of suggested values. ``low`` is included in the range.
                ``low`` must be less than or equal to ``high``. If ``log`` is :obj:`True`,
                ``low`` must be larger than 0.
            high:
                Upper endpoint of the range of suggested values. ``high`` is included in the range.
                ``high`` must be greater than or equal to ``low``.
            step:
                A step of discretization.

                .. note::
                    Note that :math:`\mathsf{high}` is modified if the range is not divisible by
                    :math:`\mathsf{step}`. Please check the warning messages to find the changed
                    values.

                .. note::
                    The method returns one of the values in the sequence
                    :math:`\mathsf{low}, \mathsf{low} + \mathsf{step}, \mathsf{low} + 2 *
                    \mathsf{step}, \dots, \mathsf{low} + k * \mathsf{step} \le
                    \mathsf{high}`, where :math:`k` denotes an integer.

                .. note::
                    The ``step != 1`` and ``log`` arguments cannot be used at the same time.
                    To set the ``step`` argument :math:`\mathsf{step} \ge 2`, set the
                    ``log`` argument to :obj:`False`.
            log:
                A flag to sample the value from the log domain or not.

                .. note::
                    If ``log`` is true, at first, the range of suggested values is divided into
                    grid points of width 1. The range of suggested values is then converted to
                    a log domain, from which a value is sampled. The uniformly sampled
                    value is re-converted to the original domain and rounded to the nearest grid
                    point that we just split, and the suggested value is determined.
                    For example, if `low = 2` and `high = 8`, then the range of suggested values is
                    `[2, 3, 4, 5, 6, 7, 8]` and lower values tend to be more sampled than higher
                    values.

                .. note::
                    The ``step != 1`` and ``log`` arguments cannot be used at the same time.
                    To set the ``log`` argument to :obj:`True`, set the ``step`` argument to 1.

        .. seealso::
            :ref:`configurations` tutorial describes more details and flexible usages.
        )r8   r9   r2   r1   )r   intr4   r5   r6   s           r*   suggest_intzTrial.suggest_int   s@    j '3TsNdmmD,?@  |4r,   c                     y r.    r(   r7   choicess      r*   suggest_categoricalzTrial.suggest_categoricalH      ORr,   c                     y r.   rO   rP   s      r*   rR   zTrial.suggest_categoricalK  rS   r,   c                     y r.   rO   rP   s      r*   rR   zTrial.suggest_categoricalN      MPr,   c                     y r.   rO   rP   s      r*   rR   zTrial.suggest_categoricalQ  s    QTr,   c                     y r.   rO   rP   s      r*   rR   zTrial.suggest_categoricalT  rV   r,   c                     y r.   rO   rP   s      r*   rR   zTrial.suggest_categoricalW  s     !$r,   c                :    | j                  |t        |            S )a  Suggest a value for the categorical parameter.

        The value is sampled from ``choices``.

        Example:

            Suggest a kernel function of `SVC <https://scikit-learn.org/stable/modules/generated/
            sklearn.svm.SVC.html>`__.

            .. testcode::

                import numpy as np
                from sklearn.datasets import load_iris
                from sklearn.model_selection import train_test_split
                from sklearn.svm import SVC

                import optuna

                X, y = load_iris(return_X_y=True)
                X_train, X_valid, y_train, y_valid = train_test_split(X, y)


                def objective(trial):
                    kernel = trial.suggest_categorical("kernel", ["linear", "poly", "rbf"])
                    clf = SVC(kernel=kernel, gamma="scale", random_state=0)
                    clf.fit(X_train, y_train)
                    return clf.score(X_valid, y_valid)


                study = optuna.create_study(direction="maximize")
                study.optimize(objective, n_trials=3)


        Args:
            name:
                A parameter name.
            choices:
                Parameter value candidates.

        .. seealso::
            :class:`~optuna.distributions.CategoricalDistribution`.

        Returns:
            A suggested value.

        .. seealso::
            :ref:`configurations` tutorial describes more details and flexible usages.
        )rQ   )r4   r   rP   s      r*   rR   zTrial.suggest_categorical\  s    l }}T#:7#KLLr,   c                R   t        | j                  j                        dkD  rt        d      	 t	        |      }	 t        |      }|dk  rt        d	| d
      || j                  j                  v rt        j                  d| d       y| j                  j                  | j                  ||       || j                  j                  |<   y# t
        t        f$ r dt        |       d}t        |      dw xY w# t
        t        f$ r dt        |       d}t        |      dw xY w)a0
  Report an objective function value for a given step.

        The reported values are used by the pruners to determine whether this trial should be
        pruned.

        .. seealso::
            Please refer to :class:`~optuna.pruners.BasePruner`.

        .. note::
            The reported value is converted to ``float`` type by applying ``float()``
            function internally. Thus, it accepts all float-like types (e.g., ``numpy.float32``).
            If the conversion fails, a ``TypeError`` is raised.

        .. note::
            If this method is called multiple times at the same ``step`` in a trial,
            the reported ``value`` only the first time is stored and the reported values
            from the second time are ignored.

        .. note::
            :func:`~optuna.trial.Trial.report` does not support multi-objective
            optimization.

        Example:

            Report intermediate scores of `SGDClassifier <https://scikit-learn.org/stable/modules/
            generated/sklearn.linear_model.SGDClassifier.html>`__ training.

            .. testcode::

                import numpy as np
                from sklearn.datasets import load_iris
                from sklearn.linear_model import SGDClassifier
                from sklearn.model_selection import train_test_split

                import optuna

                X, y = load_iris(return_X_y=True)
                X_train, X_valid, y_train, y_valid = train_test_split(X, y)


                def objective(trial):
                    clf = SGDClassifier(random_state=0)
                    for step in range(100):
                        clf.partial_fit(X_train, y_train, np.unique(y))
                        intermediate_value = clf.score(X_valid, y_valid)
                        trial.report(intermediate_value, step=step)
                        if trial.should_prune():
                            raise optuna.TrialPruned()

                    return clf.score(X_valid, y_valid)


                study = optuna.create_study(direction="maximize")
                study.optimize(objective, n_trials=3)


        Args:
            value:
                A value returned from the objective function.
            step:
                Step of the trial (e.g., Epoch of neural network training). Note that pruners
                assume that ``step`` starts at zero. For example,
                :class:`~optuna.pruners.MedianPruner` simply checks if ``step`` is less than
                ``n_warmup_steps`` as the warmup mechanism.
                ``step`` must be a positive integer.
        rJ   z?Trial.report is not supported for multi-objective optimization.z!The `value` argument is of type 'z' but supposed to be a float.Nz The `step` argument is of type 'z' but supposed to be an int.r   zThe `step` argument is z but cannot be negative.z2The reported value is ignored because this `step` z is already reported.)lenr   
directionsNotImplementedErrorfloat	TypeError
ValueErrortyperL   r   intermediate_valueswarningswarnr   set_trial_intermediate_valuer   )r(   valuer1   messages       r*   reportzTrial.report  s9   H tzz$$%)%Q 	/%LE	/t9D
 !86tf<TUVV4,,@@@MMDTFJ_` 11$..$N>C!!55d;/ :& 	/3DK=@]^  G$$.		/ :& 	/8dD`aGG$$.	/s   C C; +C8;+D&c                    t        | j                  j                        dkD  rt        d      | j	                         }| j                  j
                  j                  | j                  |      S )a  Suggest whether the trial should be pruned or not.

        The suggestion is made by a pruning algorithm associated with the trial and is based on
        previously reported values. The algorithm can be specified when constructing a
        :class:`~optuna.study.Study`.

        .. note::
            If no values have been reported, the algorithm cannot make meaningful suggestions.
            Similarly, if this method is called multiple times with the exact same set of reported
            values, the suggestions will be the same.

        .. seealso::
            Please refer to the example code in :func:`optuna.trial.Trial.report`.

        .. note::
            :func:`~optuna.trial.Trial.should_prune` does not support multi-objective
            optimization.

        Returns:
            A boolean value. If :obj:`True`, the trial should be pruned according to the
            configured pruning algorithm. Otherwise, the trial should continue.
        rJ   zETrial.should_prune is not supported for multi-objective optimization.)r\   r   r]   r^   _get_latest_trialprunerprune)r(   trials     r*   should_prunezTrial.should_prune  s\    0 tzz$$%)%W  &&(zz  &&tzz599r,   c                    | j                   j                  | j                  ||       || j                  j                  |<   y)a  Set user attributes to the trial.

        The user attributes in the trial can be access via :func:`optuna.trial.Trial.user_attrs`.

        .. seealso::

            See the recipe on :ref:`attributes`.

        Example:

            Save fixed hyperparameters of neural network training.

            .. testcode::

                import numpy as np
                from sklearn.datasets import load_iris
                from sklearn.model_selection import train_test_split
                from sklearn.neural_network import MLPClassifier

                import optuna

                X, y = load_iris(return_X_y=True)
                X_train, X_valid, y_train, y_valid = train_test_split(X, y, random_state=0)


                def objective(trial):
                    trial.set_user_attr("BATCHSIZE", 128)
                    momentum = trial.suggest_float("momentum", 0, 1.0)
                    clf = MLPClassifier(
                        hidden_layer_sizes=(100, 50),
                        batch_size=trial.user_attrs["BATCHSIZE"],
                        momentum=momentum,
                        solver="sgd",
                        random_state=0,
                    )
                    clf.fit(X_train, y_train)

                    return clf.score(X_valid, y_valid)


                study = optuna.create_study(direction="maximize")
                study.optimize(objective, n_trials=3)
                assert "BATCHSIZE" in study.best_trial.user_attrs.keys()
                assert study.best_trial.user_attrs["BATCHSIZE"] == 128


        Args:
            key:
                A key string of the attribute.
            value:
                A value of the attribute. The value should be JSON serializable.
        N)r   set_trial_user_attrr   r   
user_attrsr(   keyrg   s      r*   set_user_attrzTrial.set_user_attr  s5    l 	((eD49!!,,S1r,   z3.1.0z5.0.0c                    | j                   j                  | j                  ||       || j                  j                  |<   y)a  Set system attributes to the trial.

        Note that Optuna internally uses this method to save system messages such as failure
        reason of trials. Please use :func:`~optuna.trial.Trial.set_user_attr` to set users'
        attributes.

        Args:
            key:
                A key string of the attribute.
            value:
                A value of the attribute. The value should be JSON serializable.
        N)r   set_trial_system_attrr   r   r%   rs   s      r*   set_system_attrzTrial.set_system_attrR  s4     	**4>>3F6;!!..s3r,   c                   | j                   }| j                  }| j                         }||j                  v r4t        j                  |j                  |   |       |j
                  |   }|S | j                  ||      r| j                  |   }n|j                         rt        j                  |      }nj| j                  ||      r| j                  |   }nHt        j                  | j                  |      }| j                  j                  j!                  ||||      }|j#                  |      }|j%                  ||||       || j&                  j                  |<   || j&                  j
                  |<   |S r.   )r   r   rk   r    check_distribution_compatibilityparams_is_fixed_paramr'   single_get_single_value_is_relative_paramr0   r
   r   r   r    sample_independentto_internal_reprset_trial_paramr   )	r(   r7   r:   r   r)   rn   param_valuer   param_value_in_internal_reprs	            r*   r4   zTrial._suggestd  sT   ,,>>&&(5&&&::5;N;Nt;TVbc,,t,K( % ##D,7"006$$&+==lK((|<"2248--djj%@"jj00CC5$
 ,8+H+H+U(##Hd4PR^_<HD%%33D95@D%%,,T2r,   c                    || j                   vry| j                   |   }|j                  |      }|j                  |      }|s&t        j                  dj                  |||             y)NFzGFixed parameter '{}' with value {} is out of range for distribution {}.T)r'   r   	_containsrd   re   format)r(   r7   r:   r   r   	containeds         r*   r|   zTrial._is_fixed_param  sl    t)))((.'3'D'D['Q$ **+GH	MM''-vdK'N r,   c                   || j                   vry|| j                  vrt        dj                  |            | j                  |   }t	        j
                  ||       | j                   |   }|j                  |      }|j                  |      S )NFzpThe parameter '{}' was sampled by `sample_relative` method but it is not contained in the relative search space.)r0   r#   ra   r   r   rz   r   r   )r(   r7   r:   relative_distributionr   r   s         r*   r   zTrial._is_relative_param  s    t+++t111HHNt 
 !% : :4 @667Ll[**40'3'D'D['Q$%%&BCCr,   c                    | j                   j                  j                  ||      }||k7  r9t        j                  dj                  ||j                               t               y y )NaJ  Inconsistent parameter values for distribution with name "{}"! This might be a configuration mistake. Optuna allows to call the same distribution with the same name more than once in a trial. When the parameter values are inconsistent optuna only uses the values of the first call and ignores all following. Using these values: {})r   r   r&   rd   re   r   _asdictRuntimeWarning)r(   r7   r:   old_distributions       r*   r5   zTrial._check_distribution  sZ    44BBFFt\Z|+MM) *06F6N6N6P)Q	 ,r,   c                    t        j                   | j                        }t        | j                  | j                        |_        |S r.   )copyr   _LazyTrialSystemAttrsr   r   r%   )r(   latest_trials     r*   rk   zTrial._get_latest_trial  s8    yy!:!:;$9NNDLL%
! r,   c                T    t        j                  | j                  j                        S )zqReturn parameters to be optimized.

        Returns:
            A dictionary containing all parameters.
        )r   deepcopyr   r{   r(   s    r*   r{   zTrial.params  s     }}T66==>>r,   c                T    t        j                  | j                  j                        S )zReturn distributions of parameters to be optimized.

        Returns:
            A dictionary containing all distributions.
        )r   r   r   r   r   s    r*   r   zTrial.distributions  s     }}T66DDEEr,   c                T    t        j                  | j                  j                        S )zkReturn user attributes.

        Returns:
            A dictionary containing all user attributes.
        )r   r   r   rr   r   s    r*   rr   zTrial.user_attrs  s     }}T66AABBr,   c                r    t        j                  | j                  j                  | j                              S )zoReturn system attributes.

        Returns:
            A dictionary containing all system attributes.
        )r   r   r   get_trial_system_attrsr   r   s    r*   r%   zTrial.system_attrs  s&     }}T\\@@PQQr,   c                .    | j                   j                  S )zvReturn start datetime.

        Returns:
            Datetime where the :class:`~optuna.trial.Trial` started.
        )r   datetime_startr   s    r*   r   zTrial.datetime_start  s     ((777r,   c                .    | j                   j                  S )zxReturn trial's number which is consecutive and unique in a study.

        Returns:
            A trial number.
        )r   numberr   s    r*   r   zTrial.number  s     ((///r,   )r   z'optuna.study.Study'r)   rL   returnNone)r   zdict[str, Any])r7   strr8   r_   r9   r_   r1   zfloat | Noner2   boolr   r_   )r7   r   r8   r_   r9   r_   r   r_   )
r7   r   r8   r_   r9   r_   rG   r_   r   r_   )r7   r   r8   rL   r9   rL   r1   rL   r2   r   r   rL   )r7   r   rQ   zSequence[None]r   r   )r7   r   rQ   zSequence[bool]r   r   )r7   r   rQ   zSequence[int]r   rL   )r7   r   rQ   zSequence[float]r   r_   )r7   r   rQ   zSequence[str]r   r   )r7   r   rQ   zSequence[CategoricalChoiceType]r   r   )rg   r_   r1   rL   r   r   )r   r   )rt   r   rg   r   r   r   )r7   r   r:   r   r   r   )r7   r   r:   r   r   r   )r7   r   r:   r   r   r   )r   r   )r   zdict[str, BaseDistribution])r   zdatetime.datetime | None)r   rL   )"__name__
__module____qualname____doc__r+   propertyr0   r<   r   _suggest_deprecated_msgr   rC   rE   rH   r   r   rM   r   rR   ri   ro   ru   rx   r4   r|   r   r5   rk   r{   r   rr   r%   r   r   rO   r,   r*   r   r      s   $\" % % "WW W 	W W W 
Wr Wg,C,J,JPR,J,ST3 U3* Wg,C,J,JPa,J,bc= d=* Wg,C,J,JPa,J,bc; d;8 ;WX=>EWW!W),W7:WEIW	W YWr R RR RP PT TP P$$"A$	$ $6M6M"A6M	6MpcDJ:@7:r Wg&< '<">D" ? ? F F C C Wg&R ' R 8 8 0 0r,   r   c                  ,     e Zd Zd fdZd fdZ xZS )r   c                L    t         |           || _        || _        d| _        y )NF)superr+   r   r   _initialized)r(   r)   r   	__class__s      r*   r+   z_LazyTrialSystemAttrs.__init__  s$    !!r,   c                    |dk(  rE| j                   s9d| _         t        | 	  | j                  j	                  | j
                               t        |   |      S )NdataT)r   r   updater   r   r   __getattribute__)r(   rt   r   s     r*   r   z&_LazyTrialSystemAttrs.__getattribute__  sJ    &=$$$(!t}}CCDNNSTw',,r,   )r)   rL   r   zoptuna.storages.BaseStorager   r   )rt   r   r   r   )r   r   r   r+   r   __classcell__)r   s   @r*   r   r     s    "- -r,   r   )%
__future__r   collectionsr   collections.abcr   r   datetimetypingr   r   rd   optunar   r	   r
   optuna._convert_positional_argsr   optuna._deprecatedr   optuna.distributionsr   r   r   r   r   optuna.trialr   optuna.trial._baser   r   
get_loggerr   _loggerr   r   r   rO   r,   r*   <module>r      s|    "   $           C . 1 6 8 2 0 $ ; ( '

X
&< T0I T0n-H -r,   