{"id":26541364,"url":"https://github.com/coffeevampir3/modularizedlcppserver","last_synced_at":"2025-03-22T01:20:15.137Z","repository":{"id":283226401,"uuid":"951076005","full_name":"CoffeeVampir3/ModularizedLcppServer","owner":"CoffeeVampir3","description":"Cpp23 Modularized Multi-User batching server for lcpp","archived":false,"fork":false,"pushed_at":"2025-03-19T06:09:09.000Z","size":19,"stargazers_count":0,"open_issues_count":0,"forks_count":0,"subscribers_count":1,"default_branch":"master","last_synced_at":"2025-03-19T07:22:10.819Z","etag":null,"topics":["batching","cpp","cpp23","llamacpp","llm-inference","modules","server"],"latest_commit_sha":null,"homepage":"","language":"C++","has_issues":true,"has_wiki":null,"has_pages":null,"mirror_url":null,"source_name":null,"license":null,"status":null,"scm":"git","pull_requests_enabled":true,"icon_url":"https://github.com/CoffeeVampir3.png","metadata":{"files":{"readme":"README.md","changelog":null,"contributing":null,"funding":null,"license":null,"code_of_conduct":null,"threat_model":null,"audit":null,"citation":null,"codeowners":null,"security":null,"support":null,"governance":null,"roadmap":null,"authors":null,"dei":null,"publiccode":null,"codemeta":null}},"created_at":"2025-03-19T05:59:20.000Z","updated_at":"2025-03-19T06:09:12.000Z","dependencies_parsed_at":"2025-03-19T07:22:12.364Z","dependency_job_id":"98674542-b5d0-469a-bede-d19ed8f97c88","html_url":"https://github.com/CoffeeVampir3/ModularizedLcppServer","commit_stats":null,"previous_names":["coffeevampir3/modularizedlcppserver"],"tags_count":0,"template":false,"template_full_name":null,"repository_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/CoffeeVampir3%2FModularizedLcppServer","tags_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/CoffeeVampir3%2FModularizedLcppServer/tags","releases_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/CoffeeVampir3%2FModularizedLcppServer/releases","manifests_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/CoffeeVampir3%2FModularizedLcppServer/manifests","owner_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/owners/CoffeeVampir3","download_url":"https://codeload.github.com/CoffeeVampir3/ModularizedLcppServer/tar.gz/refs/heads/master","host":{"name":"GitHub","url":"https://github.com","kind":"github","repositories_count":244890812,"owners_count":20527153,"icon_url":"https://github.com/github.png","version":null,"created_at":"2022-05-30T11:31:42.601Z","updated_at":"2022-07-04T15:15:14.044Z","host_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub","repositories_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories","repository_names_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repository_names","owners_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/owners"}},"keywords":["batching","cpp","cpp23","llamacpp","llm-inference","modules","server"],"created_at":"2025-03-22T01:20:14.574Z","updated_at":"2025-03-22T01:20:15.132Z","avatar_url":"https://github.com/CoffeeVampir3.png","language":"C++","readme":"C++ https://github.com/ggerganov/llama.cpp modularized server setup for bindings with continuous batching support for multi-user inference. \nThis is designed to be exposed as C-compatible binding interface, this repo serves as an example of pure C++ and is mostly here to serve as an example.\n\nSpecialized Features:\n- Inference rewinding\n- Multi-User inference with Continuous Batching\n- Minimum tokens\n- Fully asynchronous inference design for FFI servers\n\nThis is a modernized version of the (more compatible for general users) bindings I wrote for https://github.com/theroyallab/YALS\n","funding_links":[],"categories":[],"sub_categories":[],"project_url":"https://awesome.ecosyste.ms/api/v1/projects/github.com%2Fcoffeevampir3%2Fmodularizedlcppserver","html_url":"https://awesome.ecosyste.ms/projects/github.com%2Fcoffeevampir3%2Fmodularizedlcppserver","lists_url":"https://awesome.ecosyste.ms/api/v1/projects/github.com%2Fcoffeevampir3%2Fmodularizedlcppserver/lists"}