415be78cc0d1275a29d0ceda550d0d7a7a5224ea,python/ray/util/sgd/torch/torch_trainer.py,TorchTrainer,__init__,#,171

Before Change


                 "For more information, see "
                 "https://github.com/pytorch/examples/issues/467."))

        if not (callable(model_creator) and callable(optimizer_creator)):
            raise ValueError(
                "Must provide a callable model_creator and optimizer_creator.")

        if num_replicas is not None:
            raise DeprecationWarning(
                "num_replicas is deprecated. Use num_workers instead.")

        if batch_size is not None:
            raise DeprecationWarning(
                "batch_size is deprecated. Use config={"batch_size": N} "
                "specify a batch size for each worker or "
                "config={ray.util.sgd.utils.BATCH_SIZE: N} to specify a "
                "batch size to be used across all workers.")

        if data_loader_args:
            raise ValueError(
                "data_loader_args is deprecated. You can return a "
                "torch.utils.data.DataLoader in data_creator. Ray will "
                "automatically set a DistributedSampler if a DataLoader is "
                "returned and num_workers > 1.")

        self.model_creator = model_creator
        self.optimizer_creator = optimizer_creator
        self.loss_creator = loss_creator
        self.data_creator = data_creator
        self.scheduler_creator = scheduler_creator
        self.training_operator_cls = training_operator_cls

        if not training_operator_cls and not loss_creator:
            raise ValueError("If a loss_creator is not provided, you must "

After Change


                "batch size to be used across all workers.")

        if serialize_data_creation is True:
            if log_once("serialize_data_creation"):
                logging.warning(
                    "serialize_data_creation is deprecated and will be "
                    "ignored. If you require serialized data loading you "
                    "should implement this in TrainingOperator.setup. "
                    "You may find FileLock useful here.")

        if data_loader_args:
            raise DeprecationWarning(
                "data_loader_args is deprecated. You can return a "
                "torch.utils.data.DataLoader in data_creator. Ray will "
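
The post-change excerpt softens one of these hard failures: the deprecated argument is logged once and then ignored instead of raising. A minimal sketch of that pattern follows, assuming a simple stand-in for the log_once helper referenced in the excerpt; the class and names below are illustrative, not Ray's API.

        import logging

        logger = logging.getLogger(__name__)

        # Stand-in for the log_once helper: returns True only the first time
        # a key is seen, so repeated constructions do not spam the logs.
        _logged_keys = set()

        def log_once(key):
            if key in _logged_keys:
                return False
            _logged_keys.add(key)
            return True

        class Trainer:
            def __init__(self, model_creator, optimizer_creator,
                         serialize_data_creation=None, data_loader_args=None):
                if serialize_data_creation is True:
                    # Soft deprecation: warn once, then ignore the argument.
                    if log_once("serialize_data_creation"):
                        logger.warning(
                            "serialize_data_creation is deprecated and will "
                            "be ignored. Serialize data loading in your own "
                            "setup code (FileLock may help) if you need it.")
                if data_loader_args:
                    # Arguments with no safe fallback still fail hard.
                    raise DeprecationWarning(
                        "data_loader_args is deprecated. Return a DataLoader "
                        "from data_creator instead.")
                self.model_creator = model_creator
                self.optimizer_creator = optimizer_creator

        logging.basicConfig()
        Trainer(lambda config: None, lambda model, config: None,
                serialize_data_creation=True)
        # A second construction does not log the warning again.
        Trainer(lambda config: None, lambda model, config: None,
                serialize_data_creation=True)
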
In pattern: SUPERPATTERN

Frequency: 5

Non-data size: 7

Instances


Project Name: ray-project/ray
Commit Name: 415be78cc0d1275a29d0ceda550d0d7a7a5224ea
Time: 2020-09-08
Author: amogkam@users.noreply.github.com
File Name: python/ray/util/sgd/torch/torch_trainer.py
Class Name: TorchTrainer
Method Name: __init__


Project Name: IBM/adversarial-robustness-toolbox
Commit Name: 9a3a5f4ecfc2bdb2e6b34feec20869528d5d7661
Time: 2020-09-28
Author: M.N.Tran@ibm.com
File Name: art/estimators/classification/pytorch.py
Class Name: PyTorchClassifier
Method Name: __init__


Project Name: pfnet/optuna
Commit Name: 1065e85b335904177060014b1b6775ecd975a643
Time: 2020-03-13
Author: sano@preferred.jp
File Name: optuna/samplers/grid.py
Class Name: GridSampler
Method Name: sample_relative


Project Name: IBM/adversarial-robustness-toolbox
Commit Name: d5801e31dafe0320109862e9cd216213e62e92b5
Time: 2020-06-25
Author: M.N.Tran@ibm.com
File Name: art/estimators/object_detection/tensorflow_faster_rcnn.py
Class Name: TensorFlowFasterRCNN
Method Name: __init__


Project Name: elfi-dev/elfi
Commit Name: c61a48cc0bdc8c5f748495a7e2567789cb748cad
Time: 2017-06-30
Author: jarno.lintusaari@aalto.fi
File Name: elfi/methods/parameter_inference.py
Class Name: BayesianOptimization
Method Name: set_objective