ContextualIntentSlotRepresentation.Config

Component: ContextualIntentSlotRepresentation

class ContextualIntentSlotRepresentation.Config[source]

Bases: RepresentationBase.Config

All Attributes (including base classes)

load_path: Optional[str] = None
save_path: Optional[str] = None
freeze: bool = False
shared_module_key: Optional[str] = None
sen_representation: DocNNRepresentation.Config = DocNNRepresentation.Config()
seq_representation: DocNNRepresentation.Config = DocNNRepresentation.Config()
joint_representation: Union[BiLSTMDocSlotAttention.Config, JointCNNRepresentation.Config] = BiLSTMDocSlotAttention.Config()

Default JSON

{
    "load_path": null,
    "save_path": null,
    "freeze": false,
    "shared_module_key": null,
    "sen_representation": {
        "load_path": null,
        "save_path": null,
        "freeze": false,
        "shared_module_key": null,
        "dropout": 0.4,
        "cnn": {
            "kernel_num": 100,
            "kernel_sizes": [
                3,
                4
            ],
            "weight_norm": false,
            "dilated": false,
            "causal": false
        }
    },
    "seq_representation": {
        "load_path": null,
        "save_path": null,
        "freeze": false,
        "shared_module_key": null,
        "dropout": 0.4,
        "cnn": {
            "kernel_num": 100,
            "kernel_sizes": [
                3,
                4
            ],
            "weight_norm": false,
            "dilated": false,
            "causal": false
        }
    },
    "joint_representation": {
        "BiLSTMDocSlotAttention": {
            "load_path": null,
            "save_path": null,
            "freeze": false,
            "shared_module_key": null,
            "dropout": 0.4,
            "lstm": {
                "BiLSTM": {
                    "load_path": null,
                    "save_path": null,
                    "freeze": false,
                    "shared_module_key": null,
                    "dropout": 0.4,
                    "lstm_dim": 32,
                    "num_layers": 1,
                    "bidirectional": true,
                    "pack_sequence": true
                }
            },
            "pooling": null,
            "slot_attention": null,
            "doc_mlp_layers": 0,
            "word_mlp_layers": 0
        }
    }
}