src/erllama.app.src

%% OTP application resource file for erllama. Consulted as plain Erlang
%% terms (file:consult/1), so `%` comments are permitted here and are
%% stripped when rebar3 generates the final ebin/erllama.app.
{application,erllama,
             [{description,"Native Erlang/OTP wrapper around llama.cpp with token-exact tiered KV cache; cache more warm state than fits in RAM"},
              {vsn,"0.1.0"},
              %% Locally registered process names owned by this application;
              %% OTP uses this list to detect duplicate application starts.
              {registered,[erllama_sup,erllama_cache_sup,
                           erllama_cache_meta_srv]},
              %% Application callback module and its start argument.
              {mod,{erllama_app,[]}},
              %% Applications that must be started before this one.
              {applications,[kernel,stdlib,sasl,crypto]},
              %% Default runtime configuration, overridable per-key via
              %% sys.config / application:set_env/3.
              %% NOTE(review): semantics below are inferred from the key
              %% names and their unit suffixes (_mb, _tokens, _ms) — confirm
              %% against the consuming erllama_cache* modules.
              {env,[{tiers,[#{backend => ram,quota_mb => 4096}]},
                    {min_tokens,512},
                    {cold_min_tokens,512},
                    {cold_max_tokens,30000},
                    {continued_interval,2048},
                    {boundary_trim_tokens,32},
                    {boundary_align_tokens,2048},
                    {evict_save_timeout_ms,30000},
                    {session_resume_wait_ms,500},
                    {fingerprint_mode,safe},
                    %% Memory-pressure-driven eviction scheduler;
                    %% shipped disabled (enabled => false) by default.
                    {scheduler,#{enabled => false,high_watermark => 0.85,
                                 low_watermark => 0.75,
                                 pressure_source => noop,interval_ms => 5000,
                                 min_evict_bytes => 1048576,
                                 evict_tiers => [ram,ram_file]}}]},
              %% Populated automatically by rebar3 at compile time.
              {modules,[]},
              %% Hex package metadata (read by rebar3/rebar3_hex when
              %% publishing; ignored by the OTP application controller).
              {licenses,["MIT"]},
              {links,[]},
              %% Files and directories included in the published Hex tarball.
              {files,["rebar.config","rebar.lock","do_cmake.sh","do_llama.sh",
                      "README.md","CHANGELOG.md","ROADMAP.md","AGENTS.md",
                      "LICENSE","UPDATE_LLAMA.md","config","src","include",
                      "guides","internals","c_src/CMakeLists.txt",
                      "c_src/CMake","c_src/erllama_nif.c",
                      "c_src/erllama_safe.cpp","c_src/crc32c.c",
                      "c_src/crc32c.h","c_src/llama.cpp"]},
              %% Paths under the vendored llama.cpp checkout excluded from
              %% the Hex tarball (VCS/CI metadata and unused helper files).
              {exclude_files,["c_src/llama.cpp/.git",
                              "c_src/llama.cpp/.github",
                              "c_src/llama.cpp/.devops",
                              "c_src/llama.cpp/.gemini","c_src/llama.cpp/.pi",
                              "c_src/llama.cpp/.pre-commit-config.yaml",
                              "c_src/llama.cpp/.dockerignore",
                              "c_src/llama.cpp/AUTHORS",
                              "c_src/llama.cpp/build-xcframework.sh"]}]}.