RoBERTaWordTaggingModel.Config

Component: RoBERTaWordTaggingModel

class RoBERTaWordTaggingModel.Config

Bases: BaseModel.Config

Default JSON

{
    "inputs": {
        "tokens": {
            "is_input": true,
            "columns": [
                "text"
            ],
            "tokenizer": {
                "GPT2BPETokenizer": {
                    "bpe_encoder_path": "manifold://pytext_training/tree/static/vocabs/bpe/gpt2/encoder.json",
                    "bpe_vocab_path": "manifold://pytext_training/tree/static/vocabs/bpe/gpt2/vocab.bpe",
                    "lowercase": false
                }
            },
            "base_tokenizer": null,
            "vocab_file": "gpt2_bpe_dict",
            "max_seq_len": 256,
            "add_selfie_token": false,
            "labels_columns": [
                "label"
            ],
            "labels": []
        }
    },
    "encoder": {
        "RoBERTaEncoderJit": {
            "load_path": null,
            "save_path": null,
            "freeze": false,
            "shared_module_key": null,
            "output_dropout": 0.4,
            "embedding_dim": 768,
            "pooling": "cls_token",
            "export": false,
            "projection_dim": 0,
            "normalize_output_rep": false,
            "pretrained_encoder": {
                "load_path": "public",
                "save_path": null,
                "freeze": false,
                "shared_module_key": null
            }
        }
    },
    "decoder": {
        "load_path": null,
        "save_path": null,
        "freeze": false,
        "shared_module_key": null,
        "hidden_dims": [],
        "out_dim": null,
        "layer_norm": false,
        "dropout": 0.0,
        "bias": true,
        "activation": "relu",
        "temperature": 1.0,
        "spectral_normalization": false
    },
    "output_layer": {
        "load_path": null,
        "save_path": null,
        "freeze": false,
        "shared_module_key": null,
        "loss": {
            "CrossEntropyLoss": {}
        },
        "label_weights": {},
        "ignore_pad_in_loss": true
    }
}
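
The defaults above can be overridden either in a training config JSON or programmatically before building the model. Below is a minimal sketch of the programmatic route; the import path (pytext.models.roberta) and direct attribute assignment on the nested config objects are assumptions inferred from the component name and the JSON fields shown here, not guarantees of this page, so verify them against your PyText version.

# Minimal sketch: start from the defaults shown above and override a few fields.
# Import path and mutability of the config objects are assumptions (see note above).
from pytext.models.roberta import RoBERTaWordTaggingModel

model_config = RoBERTaWordTaggingModel.Config()

# Shorten input sequences (default max_seq_len is 256 in the JSON above).
model_config.inputs.tokens.max_seq_len = 128

# Reduce encoder output dropout (default is 0.4 in the JSON above).
model_config.encoder.output_dropout = 0.1

In a training config file, the same overrides would sit under the model section of whichever task wraps this model, mirroring the nesting of the default JSON (inputs.tokens.max_seq_len, encoder.output_dropout, and so on).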