[generator]
PROVIDER = huggingface
MODEL = meta-llama/Meta-Llama-3-8B-Instruct
MAX_TOKENS = 768
TEMPERATURE = 0.2
INFERENCE_PROVIDER = novita
ORGANIZATION = GIZ

[reader]
TYPE = INF_PROVIDERS
INF_PROVIDER_MODEL = meta-llama/Llama-3.1-8B-Instruct
DEDICATED_MODEL = meta-llama/Llama-3.1-8B-Instruct
DEDICATED_ENDPOINT = https://qu2d8m6dmsollhly.us-east-1.aws.endpoints.huggingface.cloud
NVIDIA_MODEL = meta-llama/Llama-3.1-8B-Instruct
NVIDIA_ENDPOINT = https://huggingface.co/api/integrations/dgx/v1
MAX_TOKENS = 768
INF_PROVIDER = nebius

[app]
dropdown_default = Annual Consolidated OAG 2024