Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into divya-mixup
Browse files Browse the repository at this point in the history
  • Loading branch information
divyashreepathihalli committed Jun 21, 2023
2 parents 1e6aef4 + 471f02f commit a301432
Show file tree
Hide file tree
Showing 9 changed files with 411 additions and 51 deletions.
4 changes: 1 addition & 3 deletions keras_core/backend/tensorflow/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,7 @@ def uniform(shape, minval=0.0, maxval=1.0, dtype=None, seed=None):

def categorical(logits, num_samples, dtype="int64", seed=None):
    """Draw `num_samples` class indices per row of `logits`.

    Args:
        logits: unnormalized log-probabilities; `tf.random.stateless_categorical`
            expects shape `(batch, num_classes)` — see its API docs.
        num_samples: number of independent draws per batch row.
        dtype: dtype of the returned indices. Defaults to `"int64"`.
        seed: optional seed, converted to a stateless TF seed pair by
            `tf_draw_seed`.

    Returns:
        Tensor of sampled class indices of shape `(batch, num_samples)`,
        cast to `dtype`.
    """
    # NOTE: the scraped diff showed both the pre- and post-merge form of the
    # call below; only the merged single-line form is kept here.
    seed = tf_draw_seed(seed)
    output = tf.random.stateless_categorical(logits, num_samples, seed=seed)
    return tf.cast(output, dtype)


Expand Down
5 changes: 4 additions & 1 deletion keras_core/backend/torch/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,10 @@ def categorical(logits, num_samples, dtype="int32", seed=None):
dtype = to_torch_dtype(dtype)
generator = torch_seed_generator(seed, device=get_device())
return torch.multinomial(
logits, num_samples, replacement=True, generator=generator,
logits,
num_samples,
replacement=True,
generator=generator,
).type(dtype)


Expand Down
2 changes: 1 addition & 1 deletion keras_core/layers/core/masking.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def call(self, inputs):
try:
outputs._keras_mask = ops.squeeze(boolean_mask, axis=-1)
except AttributeError:
# outputs in a C type.
# tensor is a C type.
pass
return outputs

Expand Down
9 changes: 7 additions & 2 deletions keras_core/layers/preprocessing/tf_data_layer.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from tensorflow import nest

from keras_core import backend
from keras_core.layers.layer import Layer
from keras_core.utils import backend_utils
Expand All @@ -22,8 +24,11 @@ def __call__(self, inputs, **kwargs):
):
# We're in a TF graph, e.g. a tf.data pipeline.
self.backend.set_backend("tensorflow")
inputs = self.backend.convert_to_tensor(
inputs, dtype=self.compute_dtype
inputs = nest.map_structure(
lambda x: self.backend.convert_to_tensor(
x, dtype=self.compute_dtype
),
inputs,
)
switch_convert_input_args = False
if self._convert_input_args:
Expand Down
2 changes: 2 additions & 0 deletions keras_core/models/functional.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,8 @@ def __init__(self, inputs, outputs, name=None, **kwargs):
# We will convert directly (to the correct dtype per input).
self._convert_input_args = False
self._allow_non_tensor_positional_args = True
output_layers = [x._keras_history[0] for x in self.outputs]
self.output_names = [x.name for x in output_layers]
self._post_build()

@property
Expand Down
288 changes: 277 additions & 11 deletions keras_core/models/model_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,22 +8,47 @@
from keras_core.models.model import model_from_json


class ModelTest(testing.TestCase):
def _get_model(self):
input_a = Input(shape=(3,), batch_size=2, name="input_a")
input_b = Input(shape=(3,), batch_size=2, name="input_b")
x = input_a + input_b
x = layers.Dense(5)(x)
outputs = layers.Dense(4)(x)
model = Model([input_a, input_b], outputs)
return model
def _get_model():
    """Build a small two-input functional model used by the tests."""
    input_a = Input(shape=(3,), batch_size=2, name="input_a")
    input_b = Input(shape=(3,), batch_size=2, name="input_b")
    hidden = layers.Dense(5)(input_a + input_b)
    outputs = layers.Dense(4)(hidden)
    return Model([input_a, input_b], outputs)


def _get_model_multi_outputs_list():
    """Two-output functional model whose outputs are passed as a list."""
    inputs = Input(shape=(3,), name="input_a")
    regression_head = layers.Dense(1, name="output_a")(inputs)
    sigmoid_head = layers.Dense(1, name="output_b", activation="sigmoid")(
        inputs
    )
    return Model(inputs, [regression_head, sigmoid_head])


def _get_model_multi_outputs_list_no_output_names():
    """Two-output functional model with auto-generated (unnamed) output layers."""
    inputs = Input(shape=(3,), name="input_a")
    head_one = layers.Dense(1)(inputs)
    head_two = layers.Dense(1, activation="sigmoid")(inputs)
    return Model(inputs, [head_one, head_two])


def _get_model_multi_outputs_dict():
    """Two-output functional model whose outputs are passed as a dict."""
    inputs = Input(shape=(3,), name="input_a")
    outputs = {
        "output_a": layers.Dense(1, name="output_a")(inputs),
        "output_b": layers.Dense(1, name="output_b", activation="sigmoid")(
            inputs
        ),
    }
    return Model(inputs, outputs)


class ModelTest(testing.TestCase):
def test_functional_rerouting(self):
    """A multi-input functional graph should build as a `Functional` model."""
    # NOTE: the scraped diff showed both the removed `self._get_model()` call
    # and the added module-level `_get_model()` call; only the merged
    # (module-level helper) form is kept here.
    model = _get_model()
    self.assertTrue(isinstance(model, Functional))

def test_json_serialization(self):
    """`to_json` round-trips: a model rebuilt from JSON serializes identically."""
    # NOTE: the scraped diff showed both `self._get_model()` (removed) and
    # `_get_model()` (added); only the merged form is kept here.
    model = _get_model()
    json_string = model.to_json()
    new_model = model_from_json(json_string)
    self.assertEqual(json_string, new_model.to_json())
Expand Down Expand Up @@ -65,3 +90,244 @@ def call(self, x):
config, custom_objects={"CustomDense": CustomDense}
)
self.assertTrue(isinstance(new_model, Functional))

def test_functional_list_outputs_list_losses(self):
    """List losses/metrics on a two-output model yield per-output history keys."""
    model = _get_model_multi_outputs_list()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss=["mean_squared_error", "binary_crossentropy"],
        metrics=[
            ["mean_squared_error"],
            ["mean_squared_error", "accuracy"],
        ],
    )
    # Fitting builds compile_metrics, which populate the history dict.
    history = model.fit(
        features, (target_a, target_b), batch_size=2, epochs=1, verbose=0
    )
    observed = sorted(history.history.keys())
    # TODO: `tf.keras` additionally reports per-output losses.
    expected = sorted(
        [
            "loss",
            # "output_a_loss",
            "output_a_mean_squared_error",
            "output_b_accuracy",
            # "output_b_loss",
            "output_b_mean_squared_error",
        ]
    )
    self.assertListEqual(observed, expected)

def test_functional_dict_outputs_dict_losses(self):
    """Dict targets + dict losses/metrics yield per-output history keys."""
    model = _get_model_multi_outputs_dict()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss={
            "output_a": "mean_squared_error",
            "output_b": "binary_crossentropy",
        },
        metrics={
            "output_a": ["mean_squared_error"],
            "output_b": ["mean_squared_error", "accuracy"],
        },
    )
    # Fitting builds compile_metrics, which populate the history dict.
    history = model.fit(
        features,
        {"output_a": target_a, "output_b": target_b},
        batch_size=2,
        epochs=1,
        verbose=0,
    )
    observed = sorted(history.history.keys())
    # TODO: `tf.keras` additionally reports per-output losses.
    expected = sorted(
        [
            "loss",
            # "output_a_loss",
            "output_a_mean_squared_error",
            "output_b_accuracy",
            # "output_b_loss",
            "output_b_mean_squared_error",
        ]
    )
    self.assertListEqual(observed, expected)

def test_functional_list_outputs_dict_losses_metrics(self):
    """Dict losses/metrics keyed by output name work with list outputs."""
    model = _get_model_multi_outputs_list()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss={
            "output_a": "mean_squared_error",
            "output_b": "binary_crossentropy",
        },
        metrics={
            "output_a": ["mean_squared_error"],
            "output_b": ["mean_squared_error", "accuracy"],
        },
    )
    # Fitting builds compile_metrics, which populate the history dict.
    history = model.fit(
        features, (target_a, target_b), batch_size=2, epochs=1, verbose=0
    )
    observed = sorted(history.history.keys())
    # TODO: `tf.keras` additionally reports per-output losses.
    expected = sorted(
        [
            "loss",
            # "output_a_loss",
            "output_a_mean_squared_error",
            "output_b_accuracy",
            # "output_b_loss",
            "output_b_mean_squared_error",
        ]
    )
    self.assertListEqual(observed, expected)

def test_functional_list_outputs_dict_losses_partial_metrics(self):
    """Metrics may cover only a subset of outputs; only those keys appear."""
    model = _get_model_multi_outputs_list()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss={
            "output_a": "mean_squared_error",
            "output_b": "binary_crossentropy",
        },
        metrics={
            "output_b": ["mean_squared_error", "accuracy"],
        },
    )
    # Fitting builds compile_metrics, which populate the history dict.
    history = model.fit(
        features, (target_a, target_b), batch_size=2, epochs=1, verbose=0
    )
    observed = sorted(history.history.keys())
    # TODO: `tf.keras` additionally reports per-output losses.
    expected = sorted(
        [
            "loss",
            # "output_a_loss",
            "output_b_accuracy",
            # "output_b_loss",
            "output_b_mean_squared_error",
        ]
    )
    self.assertListEqual(observed, expected)

def test_functional_list_outputs_dict_losses_invalid_keys(self):
    """An unknown output name in the `loss` dict raises at fit time."""
    model = _get_model_multi_outputs_list()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss={
            "output_a": "mean_squared_error",
            "output_c": "binary_crossentropy",
        },
    )
    # The bad key is only detected once fit builds the compiled loss.
    with self.assertRaisesRegex(
        ValueError,
        "In the dict argument `loss`, "
        "key 'output_c' does not correspond to any model output",
    ):
        model.fit(
            features, (target_a, target_b), batch_size=2, epochs=1, verbose=0
        )

def test_functional_list_outputs_dict_losses_no_output_names(self):
    """Loss keys cannot be matched when the output layers are unnamed."""
    model = _get_model_multi_outputs_list_no_output_names()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss={"output_a": "mean_squared_error"},
    )
    # The mismatch is only detected once fit builds the compiled loss.
    with self.assertRaisesRegex(
        ValueError,
        "In the dict argument `loss`, "
        "key 'output_a' does not correspond to any model output",
    ):
        model.fit(
            features, (target_a, target_b), batch_size=2, epochs=1, verbose=0
        )

def test_functional_list_outputs_dict_metrics_invalid_keys(self):
    """An unknown output name in the `metrics` dict raises at fit time."""
    model = _get_model_multi_outputs_list()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss={
            "output_a": "mean_squared_error",
            "output_b": "binary_crossentropy",
        },
        metrics={
            "output_c": ["mean_squared_error", "accuracy"],
        },
    )
    # The bad key is only detected once fit builds compile_metrics.
    with self.assertRaisesRegex(
        ValueError,
        "In the dict argument `metrics`, "
        "key 'output_c' does not correspond to any model output",
    ):
        model.fit(
            features, (target_a, target_b), batch_size=2, epochs=1, verbose=0
        )

def test_functional_dict_outputs_dict_losses_invalid_keys(self):
    """Dict-output models also reject unknown keys in the `loss` dict."""
    model = _get_model_multi_outputs_dict()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss={
            "output_a": "mean_squared_error",
            "output_c": "binary_crossentropy",
        },
    )
    # The bad key is only detected once fit builds the compiled loss.
    with self.assertRaisesRegex(
        ValueError,
        "In the dict argument `loss`, "
        "key 'output_c' does not correspond to any model output",
    ):
        model.fit(
            features, (target_a, target_b), batch_size=2, epochs=1, verbose=0
        )

def test_functional_dict_outputs_dict_metrics_invalid_keys(self):
    """Dict-output models also reject unknown keys in the `metrics` dict."""
    model = _get_model_multi_outputs_dict()
    self.assertTrue(isinstance(model, Functional))
    features = np.random.rand(8, 3)
    target_a = np.random.rand(8, 1)
    target_b = np.random.randint(0, 2, (8, 1))
    model.compile(
        optimizer="sgd",
        loss={
            "output_a": "mean_squared_error",
            "output_b": "binary_crossentropy",
        },
        metrics={
            "output_c": ["mean_squared_error", "accuracy"],
        },
    )
    # The bad key is only detected once fit builds compile_metrics.
    with self.assertRaisesRegex(
        ValueError,
        "In the dict argument `metrics`, "
        "key 'output_c' does not correspond to any model output",
    ):
        model.fit(
            features, (target_a, target_b), batch_size=2, epochs=1, verbose=0
        )
Loading

0 comments on commit a301432

Please sign in to comment.