<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <title>PyPI recent updates for vllm-xft</title>
    <link>https://pypi.tw.martin98.com/project/vllm-xft/</link>
    <description>Recent updates to the Python Package Index for vllm-xft</description>
    <language>en</language>
    <item>
      <title>0.5.5.4</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.5.5.4/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Tue, 29 Apr 2025 05:08:09 GMT</pubDate>
    </item>
    <item>
      <title>0.5.5.3</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.5.5.3/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Mon, 17 Mar 2025 07:33:24 GMT</pubDate>
    </item>
    <item>
      <title>0.5.5.2</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.5.5.2/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Mon, 10 Mar 2025 07:58:30 GMT</pubDate>
    </item>
    <item>
      <title>0.5.5.1</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.5.5.1/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Thu, 20 Feb 2025 06:55:00 GMT</pubDate>
    </item>
    <item>
      <title>0.5.5.0</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.5.5.0/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Thu, 29 Aug 2024 08:38:30 GMT</pubDate>
    </item>
    <item>
      <title>0.5.3.1</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.5.3.1/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Thu, 01 Aug 2024 07:01:16 GMT</pubDate>
    </item>
    <item>
      <title>0.5.3.0</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.5.3.0/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Mon, 29 Jul 2024 07:40:04 GMT</pubDate>
    </item>
    <item>
      <title>0.4.2.2</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.4.2.2/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Thu, 04 Jul 2024 07:16:02 GMT</pubDate>
    </item>
    <item>
      <title>0.4.2.1</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.4.2.1/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Mon, 24 Jun 2024 05:31:21 GMT</pubDate>
    </item>
    <item>
      <title>0.4.2.0</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.4.2.0/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Wed, 05 Jun 2024 01:38:39 GMT</pubDate>
    </item>
    <item>
      <title>0.3.3.1</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.3.3.1/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Fri, 10 May 2024 06:07:44 GMT</pubDate>
    </item>
    <item>
      <title>0.3.3.0</title>
      <link>https://pypi.tw.martin98.com/project/vllm-xft/0.3.3.0/</link>
      <description>A high-throughput and memory-efficient inference and serving engine for LLMs</description>
      <pubDate>Fri, 29 Mar 2024 05:29:00 GMT</pubDate>
    </item>
  </channel>
</rss>