Can a TensorFlow Hub module be used in TensorFlow 2.0?

I tried running this code in TensorFlow 2.0 (alpha):

import tensorflow as tf
import tensorflow_hub as hub

@tf.function
def elmo(texts):
    elmo_module = hub.Module("https://tfhub.dev/google/elmo/2", trainable=True)
    return elmo_module(texts, signature="default", as_dict=True)

embeds = elmo(tf.constant(["the cat is on the mat",
                           "dogs are in the fog"]))

But I got this error:

---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
<ipython-input-1-c7f14c7ed0e9> in <module>
      9
     10 elmo(tf.constant(["the cat is on the mat",
---> 11                   "dogs are in the fog"]))

.../tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
    417     # This is the first call of __call__, so we have to initialize.
    418     initializer_map = {}
--> 419     self._initialize(args, kwds, add_initializers_to=initializer_map)
    420     if self._created_variables:
    421       try:

.../tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
    361     self._concrete_stateful_fn = (
    362         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
--> 363             *args, **kwds))
    364
    365     def invalid_creator_scope(*unused_args, **unused_kwds):

.../tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   1322     if self.input_signature:
   1323       args, kwargs = None, None
-> 1324     graph_function, _, _ = self._maybe_define_function(args, kwargs)
   1325     return graph_function
   1326

.../tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
   1585           or call_context_key not in self._function_cache.missed):
   1586         self._function_cache.missed.add(call_context_key)
-> 1587         graph_function = self._create_graph_function(args, kwargs)
   1588         self._function_cache.primary[cache_key] = graph_function
   1589         return graph_function, args, kwargs

.../tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   1518             arg_names=arg_names,
   1519             override_flat_arg_shapes=override_flat_arg_shapes,
-> 1520             capture_by_value=self._capture_by_value),
   1521         self._function_attributes)
   1522

.../tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    705                                           converted_func)
    706
--> 707       func_outputs = python_func(*func_args, **func_kwargs)
    708
    709       # invariant: `func_outputs` contains only Tensors, IndexedSlices,

.../tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
    314         # __wrapped__ allows AutoGraph to swap in a converted function. We give
    315         # the function a weak reference to itself to avoid a reference cycle.
--> 316         return weak_wrapped_fn().__wrapped__(*args, **kwds)
    317     weak_wrapped_fn = weakref.ref(wrapped_fn)
    318

.../tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    697                   optional_features=autograph_options,
    698                   force_conversion=True,
--> 699               ), args, kwargs)
    700
    701         # Wrapping around a decorator allows checks like tf_inspect.getargspec

.../tensorflow/python/autograph/impl/api.py in converted_call(f, owner, options, args, kwargs)
    355
    356   if kwargs is not None:
--> 357     result = converted_f(*effective_args, **kwargs)
    358   else:
    359     result = converted_f(*effective_args)

/var/folders/wy/h39t6kb11pnbb0pzhksd_fqh0000gn/T/tmp4v3g2d_1.py in tf__elmo(texts)
     11       retval_ = None
     12       print('Eager:', ag__.converted_call('executing_eagerly', tf, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), (), None))
---> 13       elmo_module = ag__.converted_call('Module', hub, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), ('https://tfhub.dev/google/elmo/2',), {'trainable': True})
     14       do_return = True
     15       retval_ = ag__.converted_call(elmo_module, None, ag__.ConversionOptions(recursive=True, force_conversion=False, optional_features=(), internal_convert_user_code=True), (texts,), {'signature': 'default', 'as_dict': True})

.../tensorflow/python/autograph/impl/api.py in converted_call(f, owner, options, args, kwargs)
    252   if tf_inspect.isclass(f):
    253     logging.log(2, 'Permanently whitelisted: %s: constructor', f)
--> 254     return _call_unconverted(f, args, kwargs)
    255
    256   # Other built-in modules are permanently whitelisted.

.../tensorflow/python/autograph/impl/api.py in _call_unconverted(f, args, kwargs)
    174
    175   if kwargs is not None:
--> 176     return f(*args, **kwargs)
    177   else:
    178     return f(*args)

.../tensorflow_hub/module.py in __init__(self, spec, trainable, name, tags)
    167           name=self._name,
    168           trainable=self._trainable,
--> 169           tags=self._tags)
    170       # pylint: enable=protected-access
    171

.../tensorflow_hub/native_module.py in _create_impl(self, name, trainable, tags)
    338         trainable=trainable,
    339         checkpoint_path=self._checkpoint_variables_path,
--> 340         name=name)
    341
    342   def _export(self, path, variables_saver):

.../tensorflow_hub/native_module.py in __init__(self, spec, meta_graph, trainable, checkpoint_path, name)
    389     # TPU training code.
    390     with tf.init_scope():
--> 391       self._init_state(name)
    392
    393   def _init_state(self, name):

.../tensorflow_hub/native_module.py in _init_state(self, name)
    392
    393   def _init_state(self, name):
--> 394     variable_tensor_map, self._state_map = self._create_state_graph(name)
    395     self._variable_map = recover_partitioned_variable_map(
    396         get_node_map_from_tensor_map(variable_tensor_map))

.../tensorflow_hub/native_module.py in _create_state_graph(self, name)
    449         meta_graph,
    450         input_map={},
--> 451         import_scope=relative_scope_name)
    452
    453     # Build a list from the variable name in the module definition to the actual

.../tensorflow/python/training/saver.py in import_meta_graph(meta_graph_or_file, clear_devices, import_scope, **kwargs)
   1443   """  # pylint: disable=g-doc-exception
   1444   return _import_meta_graph_with_return_elements(
-> 1445       meta_graph_or_file, clear_devices, import_scope, **kwargs)[0]
   1446
   1447

.../tensorflow/python/training/saver.py in _import_meta_graph_with_return_elements(meta_graph_or_file, clear_devices, import_scope, return_elements, **kwargs)
   1451   """Import MetaGraph, and return both a saver and returned elements."""
   1452   if context.executing_eagerly():
-> 1453     raise RuntimeError("Exporting/importing meta graphs is not supported when "
   1454                        "eager execution is enabled. No graph exists when eager "
   1455                        "execution is enabled.")

RuntimeError: Exporting/importing meta graphs is not supported when eager execution is enabled. No graph exists when eager execution is enabled.
Reitz answered 9/4, 2019 at 4:22 Comment(0)

In TensorFlow 2.0 you should use hub.load() or hub.KerasLayer() instead of hub.Module().

[April 2019] For now only TensorFlow 2.0 modules can be loaded this way; in the future many of the TF 1.x Hub modules should become loadable as well.

For the 2.x-only modules you can see examples in the notebooks created for those modules here.

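For example, a minimal hub.KerasLayer sketch for a TF2-format module could look like the following (the nnlm-en-dim50/2 module, the Dense head, and trainable=False are illustrative assumptions, not something stated in this answer):

import tensorflow as tf
import tensorflow_hub as hub

# Wrap a TF2 SavedModel from TF Hub as a Keras layer mapping strings to embeddings.
hub_layer = hub.KerasLayer("https://tfhub.dev/google/nnlm-en-dim50/2",
                           input_shape=[], dtype=tf.string, trainable=False)

model = tf.keras.Sequential([
    hub_layer,                      # (batch,) of strings -> (batch, 50) embeddings
    tf.keras.layers.Dense(16, activation="relu"),
    tf.keras.layers.Dense(1),       # e.g. a binary-classification logit
])
model.compile(optimizer="adam",
              loss=tf.keras.losses.BinaryCrossentropy(from_logits=True))
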
Cronk answered 9/4, 2019 at 15:41 Comment(2)
Is this still the case? I want tensorflow 2.0 and Elmo :( – Lewendal
@Lewendal it does seem to be the case. You can run ELMo with TF 2.0, but you have to set trainable=False. This example works: github.com/tensorflow/hub/blob/master/examples/colab/… if you set trainable=False and disable eager execution for TF 2.0 – Doiron

The hub.load() function works with TensorFlow 2:

embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder-large/3")

instead of

embed = hub.Module("https://tfhub.dev/google/universal-sentence-encoder-large/3")

hub.Module() is not accepted in TF 2; use hub.load() instead.
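
Once loaded, a TF2-format module is directly callable. A small usage sketch (the universal-sentence-encoder/4 URL, the sample sentences, and the 512-dimensional output are assumptions taken from that module's documentation, not from this answer):

import tensorflow as tf
import tensorflow_hub as hub

# hub.load() returns the SavedModel object; TF2-format encoders are callable.
embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder/4")
embeddings = embed(["the cat is on the mat", "dogs are in the fog"])
print(embeddings.shape)  # expected: (2, 512)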

Wycoff answered 7/4, 2020 at 23:12 Comment(0)

January 2021

To use a model from TF Hub (e.g. ELMo) with TensorFlow 2.x, first download and unpack the model locally:

mkdir -p ~/tfhub/elmo3 && cd ~/tfhub/elmo3
# Ask tfhub.dev for the compressed archive of the module.
model_link='https://tfhub.dev/google/elmo/3?tf-hub-format=compressed'
wget "$model_link" -O model
tar xvzf model   # unpack the module files into the current directory
rm model         # remove the downloaded archive

Then use hub.load():

import tensorflow as tf
import tensorflow_hub as hub
elmo = hub.load("/home/user/tfhub/elmo3")

embeddings = elmo.signatures["default"](
    tf.constant(["i like green eggs and ham",
                 "i like green ham and eggs"]))["elmo"]

hub.load() can also handle the deprecated TF1 Hub format.
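
If you don't need a local copy, the same pattern should also work with the module URL passed straight to hub.load(), which resolves and caches the download; a small sketch (the printed shape is an assumption based on ELMo's documented 1024-dimensional per-token output):

import tensorflow as tf
import tensorflow_hub as hub

# hub.load() can resolve the tfhub.dev URL itself and cache the download.
elmo = hub.load("https://tfhub.dev/google/elmo/3")
embeddings = elmo.signatures["default"](
    tf.constant(["i like green eggs and ham",
                 "i like green ham and eggs"]))["elmo"]
print(embeddings.shape)  # expected: (2, 6, 1024) - one 1024-d vector per token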

Harwell answered 27/8, 2020 at 15:36 Comment(0)
