🐛 Bug
my_codebase/models.py
import padl
import torch

torch.nn = padl.transform(torch.nn)


class HiddenState(torch.nn.Module):
    def __init__(self, layer):
        super().__init__()
        self.layer = layer

    def forward(self, x):
        return self.layer(x)[0]


def build_classifier(
    rnn_layer,
    n_tokens,
):
    return (
        torch.nn.Embedding(n_tokens, rnn_layer.n_input)
        >> padl.transform(rnn_layer)
        >> padl.same[0]
        >> torch.nn.Linear(rnn_layer.n_hidden, n_tokens)
    )
config.py
from padl import params, save
from my_codebase.models import build_classifier
import torch

rnn = torch.nn.GRU(
    **params(
        'rnn',
        input_size=64,
        hidden_size=512,
        num_layers=1,
    )
)

layer = build_classifier(
    rnn_layer=rnn,
    **params(
        'classifier',
        n_tokens=16,
    )
)

save(layer, 'my_classifier.padl')
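(For context, the intent is simply to reload this pipeline elsewhere later on, roughly as in the sketch below, assuming the standard padl.load entry point.)

# Later, in another script (sketch only): rebuild the saved pipeline from disk.
from padl import load

classifier = load('my_classifier.padl')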
Running config.py produces the following traceback:
---------------------------------------------------------------------------
NameNotFound Traceback (most recent call last)
<ipython-input-4-66f7d590781c> in <module>
----> 1 layer.pd_save('my_classifier.padl', force_overwrite=True)
~/lf1-io/padl/padl/transforms.py in pd_save(self, path, force_overwrite, strict_requirements)
428
429 with TemporaryDirectory('.padl') as dirname:
--> 430 self.pd_save(dirname, False, strict_requirements=strict_requirements)
431 rmtree(path)
432 copytree(dirname, path)
~/lf1-io/padl/padl/transforms.py in pd_save(self, path, force_overwrite, strict_requirements)
438 for i, subtrans in enumerate(self._pd_all_transforms()):
439 subtrans.pd_pre_save(path, i, options=options)
--> 440 code, requirements = self._pd_dumps(return_requirements=True,
441 strict_requirements=strict_requirements,
442 path=path)
~/lf1-io/padl/padl/transforms.py in _pd_dumps(self, return_requirements, path, strict_requirements)
665 be found. If *False* print a warning if that's the case.
666 """
--> 667 graph = self._pd_build_codegraph(name='_pd_main')
668 Serializer.save_all(graph, path)
669 code = graph.dumps()
~/lf1-io/padl/padl/transforms.py in _pd_build_codegraph(self, graph, name)
1802 varname = transform.pd_varname(self._pd_call_info.scope)
1803 # pylint: disable=protected-access
-> 1804 transform._pd_build_codegraph(graph, varname)
1805
1806 self._pd_codegraph_find_dependencies(graph, todo)
~/lf1-io/padl/padl/transforms.py in _pd_build_codegraph(self, graph, name)
554 todo = self._pd_codegraph_add_startnodes(graph, new_name)
555
--> 556 self._pd_codegraph_find_dependencies(graph, todo)
557
558 return graph
~/lf1-io/padl/padl/transforms.py in _pd_codegraph_find_dependencies(self, graph, todo)
579 # Only triggered if KeyError or AttributeError is raised
580 # find how *next_name* came into being
--> 581 next_codenode = find_codenode(next_name,
582 self._pd_external_full_dump_modules)
583
~/lf1-io/padl/padl/dumptools/var2mod.py in find_codenode(name, full_dump_module_names)
703 def find_codenode(name: ScopedName, full_dump_module_names=None) -> "CodeNode":
704 """Find the :class:`CodeNode` corresponding to a :class:`ScopedName` *name*. """
--> 705 (source, node), found_name = find_in_scope(name)
706
707 module_name = None
~/lf1-io/padl/padl/dumptools/symfinder.py in find_in_scope(scoped_name)
788 if scope.module is None:
789 raise NameNotFound(format_scoped_name_not_found(scoped_name))
--> 790 source, node, name = find_scopedname(searched_name)
791 if getattr(node, '_globalscope', False):
792 scope = Scope.empty()
~/lf1-io/padl/padl/dumptools/symfinder.py in find_scopedname(scoped_name)
963 module = sys.modules['__main__']
964 try:
--> 965 return find_scopedname_in_module(scoped_name, module)
966 except TypeError as exc:
967 if module is not sys.modules['__main__']:
~/lf1-io/padl/padl/dumptools/symfinder.py in find_scopedname_in_module(scoped_name, module)
866 def find_scopedname_in_module(scoped_name: ScopedName, module):
867 source = sourceget.get_module_source(module)
--> 868 return find_scopedname_in_source(scoped_name, source)
869
870
~/lf1-io/padl/padl/dumptools/symfinder.py in find_scopedname_in_source(scoped_name, source, tree)
846 ScopedName(var_name, scoped_name.scope, (pos.lineno, pos.col_offset))
847 )
--> 848 raise NameNotFound(
849 format_scoped_name_not_found(scoped_name)
850 )
NameNotFound: Could not find "n_tokens" in scope "my_codebase.models.build_classifier".
Please make sure that "n_tokens" is defined .
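From the traceback, the serializer ends up trying to resolve n_tokens inside the scope my_codebase.models.build_classifier, where it only exists as a function parameter, rather than treating it as the keyword supplied via **params(...) in config.py. The workaround I would try is to avoid the ** unpacking and pass the value explicitly. This is a sketch only, not verified against this setup, and it assumes the mapping returned by params(...) supports item access:

# config.py, workaround sketch (unverified): avoid **-unpacking params
# into a call whose target is defined in another module.
classifier_params = params('classifier', n_tokens=16)  # assumed to behave like a dict

layer = build_classifier(
    rnn_layer=rnn,
    n_tokens=classifier_params['n_tokens'],
)

save(layer, 'my_classifier.padl')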