<package type="rpm">
	<name>koboldcpp</name>
	<arch>src</arch>
	<version epoch="0" ver="1.100.1" rel="1"/>
	<checksum type="sha256" pkgid="YES">a3ed6a0c94237892563884d030f2d960e7d74b6966c28322ad54eb2ef0e35d48</checksum>
	<summary>Run GGUF models easily with a KoboldAI UI. One File. Zero Install.</summary>
	<description>KoboldCpp is an easy-to-use AI text-generation software for GGML and GGUF models, inspired by the original KoboldAI.
It's a single self-contained distributable from Concedo, that builds off llama.cpp, and adds a versatile KoboldAI API endpoint, additional format support,
Stable Diffusion image generation, speech-to-text, backward compatibility, as well as a fancy UI with persistent stories, editing tools, save formats, memory,
world info, author's note, characters, scenarios and everything KoboldAI and KoboldAI Lite have to offer.</description>
	<packager>angrypenguin &lt;angrypenguinpoland@gmail.com&gt;</packager>
	<url>https://github.com/LostRuins/koboldcpp</url>
	<time file="1768693147" build="1768692700"/>
	<size package="45029880" installed="45018055" archive="0"/>
	<location href="koboldcpp-1.100.1-1.src.rpm"/>
	<format>
		<rpm:license>AGPL-3.0-only</rpm:license>
		<rpm:vendor>OpenMandriva</rpm:vendor>
		<rpm:group>System/AI</rpm:group>
		<rpm:buildhost>ryzen9-4.openmandriva.org</rpm:buildhost>
		<rpm:sourcerpm/>
		<rpm:header-range start="5032" end="11648"/>
		<rpm:provides>
			<rpm:entry name="koboldcpp" flags="EQ" ver="1.100.1" rel="1"/>
		</rpm:provides>
		<rpm:requires>
			<rpm:entry name="make"/>
			<rpm:entry name="pkgconfig(python)"/>
			<rpm:entry name="pkgconfig(vulkan)"/>
			<rpm:entry name="rpmlib(LargeFiles)" flags="LE" ver="4.12.0" rel="1"/>
		</rpm:requires>
	</format>
</package>
