
Commit

Merge pull request #1437 from rstudio/retether-3.3.2
Retether 3.3.2
t-kalinowski authored Apr 23, 2024
2 parents 9bc2900 + c6b615d commit 80af555
Showing 1,050 changed files with 10,170 additions and 4,905 deletions.
5 changes: 5 additions & 0 deletions .tether/man/Layer.txt
@@ -442,6 +442,9 @@ class Layer(keras.src.backend.tensorflow.layer.TFLayer, keras.src.ops.operation.
| losses
| List of scalar losses from `add_loss`, regularizers and sublayers.
|
| metrics
| List of all metrics.
|
| metrics_variables
| List of all metric variables.
|
@@ -488,6 +491,8 @@ class Layer(keras.src.backend.tensorflow.layer.TFLayer, keras.src.ops.operation.
| ----------------------------------------------------------------------
| Data descriptors defined here:
|
| dtype_policy
|
| input_spec
|
| supports_masking
11 changes: 10 additions & 1 deletion .tether/man/clear_session.txt
@@ -1,5 +1,5 @@
__signature__
keras.utils.clear_session()
keras.utils.clear_session(free_memory=True)
__doc__
Resets all state generated by Keras.

@@ -11,6 +11,14 @@ an increasing amount of memory over time, and you may want to clear it.
Calling `clear_session()` releases the global state: this helps avoid
clutter from old models and layers, especially when memory is limited.

Args:
free_memory: Whether to call Python garbage collection.
It's usually a good practice to call it to make sure
memory used by deleted objects is immediately freed.
However, it may take a few seconds to execute, so
when using `clear_session()` in a short loop,
you may want to skip it.

Example 1: calling `clear_session()` when creating models in a loop

```python
@@ -39,3 +47,4 @@ dense_10
>>> new_layer = keras.layers.Dense(10)
>>> print(new_layer.name)
dense

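A minimal sketch of the new `free_memory` argument in a loop, following the guidance in the docstring above: skip the per-iteration garbage collection inside the loop, then collect once at the end. The loop count and layer sizes are illustrative only.

```python
import keras

# Build many throwaway models; skip the (relatively slow) garbage-collection
# pass on each iteration, as the docstring suggests for short loops.
for _ in range(100):
    model = keras.Sequential([keras.layers.Dense(10) for _ in range(5)])
    keras.utils.clear_session(free_memory=False)

# One final call with the default free_memory=True to release memory now.
keras.utils.clear_session()
```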
34 changes: 17 additions & 17 deletions .tether/man/keras.applications.txt
@@ -1,4 +1,4 @@
convnext: Module(keras.applications.convnext)
convnext: Module(keras.api.applications.convnext)
ConvNeXtBase(
model_name='convnext_base',
include_top=True,
@@ -54,7 +54,7 @@ ConvNeXtXLarge(
classes=1000,
classifier_activation='softmax'
)
densenet: Module(keras.applications.densenet)
densenet: Module(keras.api.applications.densenet)
DenseNet121(
include_top=True,
weights='imagenet',
@@ -82,8 +82,8 @@ DenseNet201(
classes=1000,
classifier_activation='softmax'
)
efficientnet: Module(keras.applications.efficientnet)
efficientnet_v2: Module(keras.applications.efficientnet_v2)
efficientnet: Module(keras.api.applications.efficientnet)
efficientnet_v2: Module(keras.api.applications.efficientnet_v2)
EfficientNetB0(
include_top=True,
weights='imagenet',
@@ -234,9 +234,9 @@ EfficientNetV2S(
classifier_activation='softmax',
include_preprocessing=True
)
imagenet_utils: Module(keras.applications.imagenet_utils)
inception_resnet_v2: Module(keras.applications.inception_resnet_v2)
inception_v3: Module(keras.applications.inception_v3)
imagenet_utils: Module(keras.api.applications.imagenet_utils)
inception_resnet_v2: Module(keras.api.applications.inception_resnet_v2)
inception_v3: Module(keras.api.applications.inception_v3)
InceptionResNetV2(
include_top=True,
weights='imagenet',
@@ -255,7 +255,7 @@ InceptionV3(
classes=1000,
classifier_activation='softmax'
)
mobilenet: Module(keras.applications.mobilenet)
mobilenet: Module(keras.api.applications.mobilenet)
MobileNet(
input_shape=None,
alpha=1.0,
@@ -268,8 +268,8 @@ MobileNet(
classes=1000,
classifier_activation='softmax'
)
mobilenet_v2: Module(keras.applications.mobilenet_v2)
mobilenet_v3: Module(keras.applications.mobilenet_v3)
mobilenet_v2: Module(keras.api.applications.mobilenet_v2)
mobilenet_v3: Module(keras.api.applications.mobilenet_v3)
MobileNetV2(
input_shape=None,
alpha=1.0,
@@ -306,7 +306,7 @@ MobileNetV3Small(
classifier_activation='softmax',
include_preprocessing=True
)
nasnet: Module(keras.applications.nasnet)
nasnet: Module(keras.api.applications.nasnet)
NASNetLarge(
input_shape=None,
include_top=True,
@@ -325,8 +325,8 @@ NASNetMobile(
classes=1000,
classifier_activation='softmax'
)
resnet: Module(keras.applications.resnet)
resnet_v2: Module(keras.applications.resnet_v2)
resnet: Module(keras.api.applications.resnet)
resnet_v2: Module(keras.api.applications.resnet_v2)
ResNet101(
include_top=True,
weights='imagenet',
@@ -363,7 +363,7 @@ ResNet152V2(
classes=1000,
classifier_activation='softmax'
)
resnet50: Module(keras.applications.resnet50)
resnet50: Module(keras.api.applications.resnet50)
ResNet50(
include_top=True,
weights='imagenet',
@@ -382,7 +382,7 @@ ResNet50V2(
classes=1000,
classifier_activation='softmax'
)
vgg16: Module(keras.applications.vgg16)
vgg16: Module(keras.api.applications.vgg16)
VGG16(
include_top=True,
weights='imagenet',
@@ -392,7 +392,7 @@ VGG16(
classes=1000,
classifier_activation='softmax'
)
vgg19: Module(keras.applications.vgg19)
vgg19: Module(keras.api.applications.vgg19)
VGG19(
include_top=True,
weights='imagenet',
@@ -402,7 +402,7 @@ VGG19(
classes=1000,
classifier_activation='softmax'
)
xception: Module(keras.applications.xception)
xception: Module(keras.api.applications.xception)
Xception(
include_top=True,
weights='imagenet',
2 changes: 1 addition & 1 deletion .tether/man/keras.backend.txt
@@ -1,5 +1,5 @@
backend()
clear_session()
clear_session(free_memory=True)
epsilon()
floatx()
get_uid(prefix='')
16 changes: 8 additions & 8 deletions .tether/man/keras.datasets.txt
@@ -1,9 +1,9 @@
boston_housing: Module(keras.datasets.boston_housing)
california_housing: Module(keras.datasets.california_housing)
cifar10: Module(keras.datasets.cifar10)
cifar100: Module(keras.datasets.cifar100)
fashion_mnist: Module(keras.datasets.fashion_mnist)
imdb: Module(keras.datasets.imdb)
mnist: Module(keras.datasets.mnist)
reuters: Module(keras.datasets.reuters)
boston_housing: Module(keras.api.datasets.boston_housing)
california_housing: Module(keras.api.datasets.california_housing)
cifar10: Module(keras.api.datasets.cifar10)
cifar100: Module(keras.api.datasets.cifar100)
fashion_mnist: Module(keras.api.datasets.fashion_mnist)
imdb: Module(keras.api.datasets.imdb)
mnist: Module(keras.api.datasets.mnist)
reuters: Module(keras.api.datasets.reuters)

10 changes: 9 additions & 1 deletion .tether/man/keras.dtype_policies.txt
@@ -1,4 +1,12 @@
DTypePolicy(name)
deserialize(config, custom_objects=None)
DTypePolicy(
name,
*args,
**kwargs
)
FloatDTypePolicy(name)
get(identifier)
QuantizedDTypePolicy(name)
QuantizedFloat8DTypePolicy(name, amax_history_length=1024)
serialize(dtype_policy)

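A short, hedged sketch of the expanded dtype-policy API: constructing a policy by name, reading the dtypes it implies, and attaching it to a layer. The policy name and the Dense layer are illustrative.

```python
import keras

# Build a policy by name and inspect the dtypes it implies.
policy = keras.dtype_policies.DTypePolicy("mixed_float16")
print(policy.compute_dtype)   # float16
print(policy.variable_dtype)  # float32

# A policy object can also be passed as a layer's dtype; the layer then
# exposes it through the new `dtype_policy` descriptor on Layer (see the
# Layer.txt change above).
layer = keras.layers.Dense(4, dtype=policy)
print(layer.dtype_policy.name)  # mixed_float16
```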
4 changes: 4 additions & 0 deletions .tether/man/keras.layers.txt
@@ -537,6 +537,7 @@ Embedding(
embeddings_regularizer=None,
embeddings_constraint=None,
mask_zero=False,
weights=None,
lora_rank=None,
**kwargs
)
@@ -1007,6 +1008,9 @@ Resizing(
width,
interpolation='bilinear',
crop_to_aspect_ratio=False,
pad_to_aspect_ratio=False,
fill_mode='constant',
fill_value=0.0,
data_format=None,
**kwargs
)
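A hedged sketch of the two signature additions above: seeding an `Embedding` from an existing matrix via the restored `weights` argument, and asking `Resizing` to pad rather than distort. Shapes and values are illustrative assumptions.

```python
import numpy as np
import keras

# Embedding: initialize from a pre-built (vocab_size, dim) matrix.
# The legacy-style one-element list is an assumption carried over from Keras 2.
pretrained = np.random.rand(1000, 16).astype("float32")
embedding = keras.layers.Embedding(1000, 16, weights=[pretrained])

# Resizing: keep the aspect ratio and pad the borders instead of stretching.
resize = keras.layers.Resizing(
    224, 224,
    pad_to_aspect_ratio=True,
    fill_mode="constant",
    fill_value=0.0,
)
images = np.random.rand(2, 100, 180, 3).astype("float32")
print(resize(images).shape)  # (2, 224, 224, 3)
```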
2 changes: 1 addition & 1 deletion .tether/man/keras.legacy.txt
@@ -1,2 +1,2 @@
saving: Module(keras.legacy.saving)
saving: Module(keras.api.legacy.saving)

12 changes: 12 additions & 0 deletions .tether/man/keras.losses.txt
@@ -129,4 +129,16 @@ SparseCategoricalCrossentropy(
)
squared_hinge(y_true, y_pred)
SquaredHinge(reduction='sum_over_batch_size', name='squared_hinge')
tversky(
y_true,
y_pred,
alpha=0.5,
beta=0.5
)
Tversky(
alpha=0.5,
beta=0.5,
reduction='sum_over_batch_size',
name='tversky'
)

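The new Tversky loss generalizes Dice by weighting false positives and false negatives separately (alpha and beta respectively, per the signature above); a minimal compile-time sketch with illustrative values and a placeholder model.

```python
import keras

model = keras.Sequential([
    keras.Input(shape=(8,)),
    keras.layers.Dense(1, activation="sigmoid"),
])
# Illustrative weighting: beta > alpha penalizes false negatives more heavily.
model.compile(
    optimizer="adam",
    loss=keras.losses.Tversky(alpha=0.3, beta=0.7),
)
```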
12 changes: 10 additions & 2 deletions .tether/man/keras.mixed_precision.txt
@@ -1,13 +1,21 @@
dtype_policy()
DTypePolicy(name)
DTypePolicy(
name,
*args,
**kwargs
)
global_policy()
LossScaleOptimizer(
inner_optimizer,
initial_scale=32768.0,
dynamic_growth_steps=2000,
**kwargs
)
Policy(name)
Policy(
name,
*args,
**kwargs
)
set_dtype_policy(policy)
set_global_policy(policy)

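A brief sketch combining the entries above: setting a global dtype policy and wrapping an optimizer in `LossScaleOptimizer` with its documented defaults. The policy name and base optimizer are illustrative.

```python
import keras

# Enable mixed precision globally (assumes the backend/hardware supports it).
keras.mixed_precision.set_dtype_policy("mixed_float16")

# Wrap a base optimizer; initial_scale and dynamic_growth_steps keep the
# defaults shown in the signature above.
optimizer = keras.mixed_precision.LossScaleOptimizer(keras.optimizers.SGD())
```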
5 changes: 5 additions & 0 deletions .tether/man/keras.ops.image.txt
@@ -44,6 +44,11 @@ resize(
size,
interpolation='bilinear',
antialias=False,
crop_to_aspect_ratio=False,
pad_to_aspect_ratio=False,
fill_mode='constant',
fill_value=0.0,
data_format='channels_last'
)
rgb_to_grayscale(image, data_format='channels_last')

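A sketch of the new padding options on `resize`: resizing a non-square batch to 128x128 while preserving aspect ratio and zero-filling the borders. The batch shape is illustrative, and the image batch is passed positionally.

```python
import numpy as np
from keras import ops

images = np.random.rand(2, 64, 96, 3).astype("float32")  # batch, H, W, C
resized = ops.image.resize(
    images,
    size=(128, 128),
    pad_to_aspect_ratio=True,   # new in this retether
    fill_mode="constant",
    fill_value=0.0,
)
print(resized.shape)  # (2, 128, 128, 3)
```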
9 changes: 9 additions & 0 deletions .tether/man/keras.ops.nn.txt
@@ -42,6 +42,15 @@ conv_transpose(
data_format=None,
dilation_rate=1
)
ctc_decode(
inputs,
sequence_lengths,
strategy,
beam_width=100,
top_paths=1,
merge_repeated=True,
mask_index=None
)
ctc_loss(
target,
output,
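A hedged sketch of the new `ctc_decode` op using greedy decoding; the shapes, the number of classes, and the assumption that the result bundles decoded paths with their scores come from the signature above and the usual CTC conventions, not from this diff.

```python
import numpy as np
from keras import ops

batch, timesteps, num_classes = 2, 10, 5        # last class as the CTC blank
probs = np.random.rand(batch, timesteps, num_classes).astype("float32")
lengths = np.array([10, 8], dtype="int32")      # valid timesteps per sample

# Greedy decoding; "beam_search" with beam_width/top_paths is the alternative.
result = ops.nn.ctc_decode(
    probs,
    sequence_lengths=lengths,
    strategy="greedy",
    merge_repeated=True,
)
```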
30 changes: 27 additions & 3 deletions .tether/man/keras.ops.numpy.txt
@@ -39,8 +39,16 @@ arcsinh(x)
arctan(x)
arctan2(x1, x2)
arctanh(x)
argmax(x, axis=None)
argmin(x, axis=None)
argmax(
x,
axis=None,
keepdims=False
)
argmin(
x,
axis=None,
keepdims=False
)
argsort(x, axis=-1)
array(x, dtype=None)
average(
@@ -204,7 +212,12 @@ moveaxis(
destination
)
multiply(x1, x2)
nan_to_num(x)
nan_to_num(
x,
nan=0.0,
posinf=None,
neginf=None
)
ndim(x)
negative(x)
nonzero(x)
@@ -247,6 +260,11 @@ roll(
axis=None
)
round(x, decimals=0)
select(
condlist,
choicelist,
default=0
)
sign(x)
sin(x)
sinh(x)
@@ -317,6 +335,12 @@ var(
keepdims=False
)
vdot(x1, x2)
vectorize(
pyfunc,
*,
excluded=None,
signature=None
)
vstack(xs)
where(
condition,
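A few of the NumPy-style additions above exercised together: `nan_to_num` with explicit replacements, `argmax` with `keepdims`, `select`, and `vectorize`. The arrays are illustrative.

```python
import numpy as np
from keras import ops

x = np.array([[1.0, np.nan, np.inf], [3.0, 2.0, -np.inf]])

# nan_to_num now exposes the replacement values directly.
clean = ops.nan_to_num(x, nan=0.0, posinf=1e6, neginf=-1e6)

# argmax/argmin gained keepdims, matching NumPy.
idx = ops.argmax(clean, axis=1, keepdims=True)   # shape (2, 1)

# select picks elementwise from choicelist according to condlist, like np.select.
y = ops.select([clean > 2.0, clean < 0.0], [clean, -clean], default=0.0)

# vectorize wraps a scalar Python function into an elementwise op.
halve = ops.vectorize(lambda v: v / 2.0)
print(halve(clean))
```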
