WordTaggingLiteModel.Config¶
Component: WordTaggingLiteModel
-
class
WordTaggingLiteModel.
Config
[source] Bases:
WordTaggingModel.Config
All Attributes (including base classes)
- inputs: ByteModelInput = ByteModelInput()
- embedding: CharacterEmbedding.Config = CharacterEmbedding.Config()
- representation: Union[BiLSTMSlotAttention.Config, BSeqCNNRepresentation.Config, PassThroughRepresentation.Config, DeepCNNRepresentation.Config] = PassThroughRepresentation.Config()
- output_layer: Union[WordTaggingOutputLayer.Config, CRFOutputLayer.Config] = WordTaggingOutputLayer.Config()
- decoder: MLPDecoder.Config = MLPDecoder.Config()
Default JSON
{
"inputs": {
"token_bytes": {
"is_input": true,
"column": "text",
"tokenizer": {
"Tokenizer": {
"split_regex": "\\s+",
"lowercase": true,
"use_byte_offsets": false
}
},
"max_seq_len": null,
"max_byte_len": 15,
"offset_for_non_padding": 0,
"add_bos_token": false,
"add_eos_token": false,
"use_eos_token_for_bos": false
},
"labels": {
"is_input": false,
"slot_column": "slots",
"text_column": "text",
"tokenizer": {
"Tokenizer": {
"split_regex": "\\s+",
"lowercase": true,
"use_byte_offsets": false
}
},
"allow_unknown": false
}
},
"embedding": {
"load_path": null,
"save_path": null,
"freeze": false,
"shared_module_key": null,
"embed_dim": 100,
"sparse": false,
"cnn": {
"kernel_num": 100,
"kernel_sizes": [
3,
4
],
"weight_norm": false,
"dilated": false,
"causal": false
},
"highway_layers": 0,
"projection_dim": null,
"export_input_names": [
"char_vals"
],
"vocab_from_train_data": true,
"max_word_length": 20,
"min_freq": 1
},
"representation": {
"PassThroughRepresentation": {
"load_path": null,
"save_path": null,
"freeze": false,
"shared_module_key": null
}
},
"output_layer": {
"WordTaggingOutputLayer": {
"load_path": null,
"save_path": null,
"freeze": false,
"shared_module_key": null,
"loss": {
"CrossEntropyLoss": {}
},
"label_weights": {},
"ignore_pad_in_loss": true
}
},
"decoder": {
"load_path": null,
"save_path": null,
"freeze": false,
"shared_module_key": null,
"hidden_dims": [],
"out_dim": null,
"layer_norm": false,
"dropout": 0.0,
"bias": true,
"activation": "relu",
"temperature": 1.0,
"spectral_normalization": false
}
}