about summary refs log tree commit diff homepage
path: root/tensorflow/contrib/constrained_optimization/python/constrained_optimizer.py
diff options
context:
space:
mode:
Diffstat (limited to 'tensorflow/contrib/constrained_optimization/python/constrained_optimizer.py')
-rw-r--r--  tensorflow/contrib/constrained_optimization/python/constrained_optimizer.py  128
1 file changed, 97 insertions(+), 31 deletions(-)
diff --git a/tensorflow/contrib/constrained_optimization/python/constrained_optimizer.py b/tensorflow/contrib/constrained_optimization/python/constrained_optimizer.py
index 8055545366..0b79bdf7c0 100644
--- a/tensorflow/contrib/constrained_optimization/python/constrained_optimizer.py
+++ b/tensorflow/contrib/constrained_optimization/python/constrained_optimizer.py
@@ -55,20 +55,21 @@ class ConstrainedOptimizer(object):
"""Returns the `tf.train.Optimizer` used for optimization."""
return self._optimizer
- def minimize_unconstrained(self,
- minimization_problem,
- global_step=None,
- var_list=None,
- gate_gradients=train_optimizer.Optimizer.GATE_OP,
- aggregation_method=None,
- colocate_gradients_with_ops=False,
- name=None,
- grad_loss=None):
- """Returns an `Op` for minimizing the unconstrained problem.
+ @abc.abstractmethod
+ def _minimize_constrained(self,
+ minimization_problem,
+ global_step=None,
+ var_list=None,
+ gate_gradients=train_optimizer.Optimizer.GATE_OP,
+ aggregation_method=None,
+ colocate_gradients_with_ops=False,
+ name=None,
+ grad_loss=None):
+ """Version of `minimize_constrained` to be overridden by subclasses.
- Unlike `minimize_constrained`, this function ignores the `constraints` (and
- `proxy_constraints`) portion of the minimization problem entirely, and only
- minimizes `objective`.
+ Implementations of this method should ignore the `pre_train_ops` property of
+ the `minimization_problem`. The public `minimize_constrained` method will
+ take care of executing these before the returned train_op.
Args:
minimization_problem: ConstrainedMinimizationProblem, the problem to
@@ -83,19 +84,10 @@ class ConstrainedOptimizer(object):
grad_loss: as in `tf.train.Optimizer`'s `minimize` method.
Returns:
- TensorFlow Op.
+ `Operation`, the train_op.
"""
- return self.optimizer.minimize(
- minimization_problem.objective,
- global_step=global_step,
- var_list=var_list,
- gate_gradients=gate_gradients,
- aggregation_method=aggregation_method,
- colocate_gradients_with_ops=colocate_gradients_with_ops,
- name=name,
- grad_loss=grad_loss)
+ pass
- @abc.abstractmethod
def minimize_constrained(self,
minimization_problem,
global_step=None,
@@ -105,7 +97,7 @@ class ConstrainedOptimizer(object):
colocate_gradients_with_ops=False,
name=None,
grad_loss=None):
- """Returns an `Op` for minimizing the constrained problem.
+ """Returns an `Operation` for minimizing the constrained problem.
Unlike `minimize_unconstrained`, this function attempts to find a solution
that minimizes the `objective` portion of the minimization problem while
@@ -124,9 +116,83 @@ class ConstrainedOptimizer(object):
grad_loss: as in `tf.train.Optimizer`'s `minimize` method.
Returns:
- TensorFlow Op.
+ `Operation`, the train_op.
"""
- pass
+
+ def train_op_callback():
+ return self._minimize_constrained(
+ minimization_problem,
+ global_step=global_step,
+ var_list=var_list,
+ gate_gradients=gate_gradients,
+ aggregation_method=aggregation_method,
+ colocate_gradients_with_ops=colocate_gradients_with_ops,
+ name=name,
+ grad_loss=grad_loss)
+
+ # If we have pre_train_ops, use tf.control_dependencies() to ensure that
+ # they execute before the train_op.
+ pre_train_ops = minimization_problem.pre_train_ops
+ if pre_train_ops:
+ with ops.control_dependencies(pre_train_ops):
+ train_op = train_op_callback()
+ else:
+ train_op = train_op_callback()
+
+ return train_op
+
+ def minimize_unconstrained(self,
+ minimization_problem,
+ global_step=None,
+ var_list=None,
+ gate_gradients=train_optimizer.Optimizer.GATE_OP,
+ aggregation_method=None,
+ colocate_gradients_with_ops=False,
+ name=None,
+ grad_loss=None):
+ """Returns an `Operation` for minimizing the unconstrained problem.
+
+ Unlike `minimize_constrained`, this function ignores the `constraints` (and
+ `proxy_constraints`) portion of the minimization problem entirely, and only
+ minimizes `objective`.
+
+ Args:
+ minimization_problem: ConstrainedMinimizationProblem, the problem to
+ optimize.
+ global_step: as in `tf.train.Optimizer`'s `minimize` method.
+ var_list: as in `tf.train.Optimizer`'s `minimize` method.
+ gate_gradients: as in `tf.train.Optimizer`'s `minimize` method.
+ aggregation_method: as in `tf.train.Optimizer`'s `minimize` method.
+ colocate_gradients_with_ops: as in `tf.train.Optimizer`'s `minimize`
+ method.
+ name: as in `tf.train.Optimizer`'s `minimize` method.
+ grad_loss: as in `tf.train.Optimizer`'s `minimize` method.
+
+ Returns:
+ `Operation`, the train_op.
+ """
+
+ def train_op_callback():
+ return self.optimizer.minimize(
+ minimization_problem.objective,
+ global_step=global_step,
+ var_list=var_list,
+ gate_gradients=gate_gradients,
+ aggregation_method=aggregation_method,
+ colocate_gradients_with_ops=colocate_gradients_with_ops,
+ name=name,
+ grad_loss=grad_loss)
+
+ # If we have pre_train_ops, use tf.control_dependencies() to ensure that
+ # they execute before the train_op.
+ pre_train_ops = minimization_problem.pre_train_ops
+ if pre_train_ops:
+ with ops.control_dependencies(pre_train_ops):
+ train_op = train_op_callback()
+ else:
+ train_op = train_op_callback()
+
+ return train_op
def minimize(self,
minimization_problem,
@@ -138,7 +204,7 @@ class ConstrainedOptimizer(object):
colocate_gradients_with_ops=False,
name=None,
grad_loss=None):
- """Returns an `Op` for minimizing the constrained problem.
+ """Returns an `Operation` for minimizing the constrained problem.
This method combines the functionality of `minimize_unconstrained` and
`minimize_constrained`. If global_step < unconstrained_steps, it will
@@ -164,14 +230,14 @@ class ConstrainedOptimizer(object):
grad_loss: as in `tf.train.Optimizer`'s `minimize` method.
Returns:
- TensorFlow Op.
+ `Operation`, the train_op.
Raises:
ValueError: If unconstrained_steps is provided, but global_step is not.
"""
def unconstrained_fn():
- """Returns an `Op` for minimizing the unconstrained problem."""
+ """Returns an `Operation` for minimizing the unconstrained problem."""
return self.minimize_unconstrained(
minimization_problem=minimization_problem,
global_step=global_step,
@@ -183,7 +249,7 @@ class ConstrainedOptimizer(object):
grad_loss=grad_loss)
def constrained_fn():
- """Returns an `Op` for minimizing the constrained problem."""
+ """Returns an `Operation` for minimizing the constrained problem."""
return self.minimize_constrained(
minimization_problem=minimization_problem,
global_step=global_step,