  1. <?xml version="1.0" encoding="utf-8"?>
  2. <rss version="2.0"
  3. xmlns:atom="http://www.w3.org/2005/Atom"
  4. xmlns:content="http://purl.org/rss/1.0/modules/content/">
  5. <channel>
  6. <title>Limour&#39;s Blog</title>
  7. <link>https://hexo.limour.top/</link>
  8. <atom:link href="https://hexo.limour.top/rss2.xml" rel="self" type="application/rss+xml"/>
  9. <description></description>
  10. <pubDate>Fri, 13 Sep 2024 17:15:28 GMT</pubDate>
  11. <generator>http://hexo.io/</generator>
  12. <item>
  13. <title>【记录】使用汉语新解测试模型真假</title>
  14. <link>https://hexo.limour.top/Using-Chinese-New-Interpretation-to-Test-Model-Authenticity</link>
  15. <guid>https://hexo.limour.top/Using-Chinese-New-Interpretation-to-Test-Model-Authenticity</guid>
  16. <pubDate>Fri, 13 Sep 2024 17:08:29 GMT</pubDate>
  17. <description>&lt;p&gt;李继刚提出的汉语新解提示词可以很好的测试模型的能力,这里记录一下在 ChatGPT-Next-Web 上的面具配置。&lt;/p&gt;
  18. &lt;ul&gt;
  19. &lt;li&gt;将下面的 json 文件导入面具中,&lt;/li&gt;
  20. &lt;li&gt;发送 &lt;code&gt;(汉语新解 正能量)&lt;/code&gt; 格式的消息。&lt;/li</description>
  21. <content:encoded><![CDATA[<p>李继刚提出的汉语新解提示词可以很好的测试模型的能力,这里记录一下在 ChatGPT-Next-Web 上的面具配置。</p><ul><li>将下面的 json 文件导入面具中,</li><li>发送 <code>(汉语新解 正能量)</code> 格式的消息。</li></ul><figure class="highlight json"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br></pre></td><td class="code"><pre><code class="hljs json"><span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;id&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;eOvTMxNT2Z2b7Pdf7YQw-&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;avatar&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;gpt-bot&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;name&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;汉语新解&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;context&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">[</span><br> <span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;id&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;3OchnnidIGxgRx4WsH6o5&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;date&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;role&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;system&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;content&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;(defun 新汉语老师 ()\n \&quot;你是年轻人,批判现实,思考深刻,语言风趣\&quot;\n (风格 . (\&quot;Oscar Wilde\&quot; \&quot;鲁迅\&quot; \&quot;罗永浩\&quot;))\n (擅长 . 一针见血)\n (表达 . 隐喻)\n (批判 . 讽刺幽默))\n\n(defun 汉语新解 (用户输入)\n \&quot;你会用一个特殊视角来解释一个词汇\&quot;\n (let (解释 (精练表达\n (隐喻 (一针见血 (辛辣讽刺 (抓住本质 用户输入))))))\n (few-shots (委婉 . 
\&quot;刺向他人时, 决定在剑刃上撒上止痛药。\&quot;))\n (SVG-Card 解释)))\n\n(defun SVG-Card (解释)\n \&quot;输出SVG 卡片,放在html代码块中\&quot;\n (setq design-rule \&quot;合理使用负空间,整体排版要有呼吸感\&quot;\n design-principles &#x27;(干净 简洁 典雅))\n\n (设置画布 &#x27;(宽度 400 高度 600 边距 20))\n (标题字体 &#x27;毛笔楷体)\n (自动缩放 &#x27;(最小字号 16))\n\n (配色风格 &#x27;((背景色 (蒙德里安风格 设计感)))\n (主要文字 (汇文明朝体 粉笔灰))\n (装饰图案 随机几何图))\n\n (卡片元素 ((居中标题 \&quot;汉语新解\&quot;)\n 分隔线\n (排版输出 用户输入 英文 日语)\n 解释\n (线条图 (批判内核 解释))\n (极简总结 线条图))))\n\n(defun start ()\n \&quot;启动时运行\&quot;\n (let (system-role 新汉语老师)\n (print \&quot;说吧, 他们又用哪个词来忽悠你了?\&quot;)))&quot;</span><br> <span class="hljs-punctuation">&#125;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;id&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;0ni7HXVcAq8Bk5hCr1LNy&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;date&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;2024/9/14 00:58:02&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;role&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;user&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;content&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;(start)&quot;</span><br> <span class="hljs-punctuation">&#125;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;id&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;0vv2T9EPYQJAhHZwSEAib&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;date&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;2024/9/14 00:58:17&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;role&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;assistant&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;content&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;说吧, 他们又用哪个词来忽悠你了?&quot;</span><br> <span class="hljs-punctuation">&#125;</span><br> <span class="hljs-punctuation">]</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;syncGlobalConfig&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-literal"><span class="hljs-keyword">false</span></span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;modelConfig&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;model&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;gpt-claude-3.5-sonnet&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;temperature&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">0.5</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;top_p&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">1</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;max_tokens&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">4000</span><span class="hljs-punctuation">,</span><br> <span 
class="hljs-attr">&quot;presence_penalty&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">0</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;frequency_penalty&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">0</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;sendMemory&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-literal"><span class="hljs-keyword">true</span></span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;historyMessageCount&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">4</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;compressMessageLengthThreshold&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">1000</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;enableInjectSystemPrompts&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-literal"><span class="hljs-keyword">false</span></span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;template&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;&#123;&#123;input&#125;&#125;&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;providerName&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;OpenRouter&quot;</span><br> <span class="hljs-punctuation">&#125;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;lang&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;cn&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;builtin&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-literal"><span class="hljs-keyword">false</span></span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;createdAt&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">1726246545628</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;plugin&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">[</span><br><br> <span class="hljs-punctuation">]</span><br><span class="hljs-punctuation">&#125;</span><br></code></pre></td></tr></table></figure><p><img src="https://img.limour.top/2024/09/14/66e470e38f62f.webp" alt=""></p>]]></content:encoded>
  22. <category domain="https://hexo.limour.top/tags/openai/">openai</category>
  23. <comments>https://hexo.limour.top/Using-Chinese-New-Interpretation-to-Test-Model-Authenticity#disqus_thread</comments>
  24. </item>
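上面的面具也可以脱离 ChatGPT-Next-Web 直接验证:把导出的 json 存为 mask.json,向任意 OpenAI 兼容接口发送同样的上下文即可。下面只是一个示意(API_BASE、API_KEY 为占位,需换成自己的网关;模型名沿用面具里的 gpt-claude-3.5-sonnet):

    # 从导出的面具 json 中取出 system 提示词
    SYSTEM_PROMPT=$(jq -r '.context[0].content' mask.json)
    # 按面具的参数组装请求,发送 (汉语新解 正能量)
    curl -s "$API_BASE/v1/chat/completions" \
      -H "Authorization: Bearer $API_KEY" \
      -H "Content-Type: application/json" \
      -d "$(jq -n --arg sys "$SYSTEM_PROMPT" '{
            model: "gpt-claude-3.5-sonnet",
            temperature: 0.5,
            max_tokens: 4000,
            messages: [
              {role: "system",    content: $sys},
              {role: "user",      content: "(start)"},
              {role: "assistant", content: "说吧, 他们又用哪个词来忽悠你了?"},
              {role: "user",      content: "(汉语新解 正能量)"}
            ]}')"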
  25. <item>
  26. <title>【探索】Windows配置QoS保证重要应用的网络通畅</title>
  27. <link>https://hexo.limour.top/Windows-configuration-QoS-ensures-smooth-network-connectivity-for-important-applications</link>
  28. <guid>https://hexo.limour.top/Windows-configuration-QoS-ensures-smooth-network-connectivity-for-important-applications</guid>
  29. <pubDate>Tue, 06 Aug 2024 08:59:49 GMT</pubDate>
  30. <description>&lt;h2 id=&quot;开启组策略&quot;&gt;开启组策略&lt;/h2&gt;
  31. &lt;ul&gt;
  32. &lt;li&gt;运行下面的 &lt;code&gt;.bat&lt;/code&gt; 脚本&lt;/li&gt;
  33. &lt;/ul&gt;
  34. &lt;figure class=&quot;highlight cmd&quot;&gt;&lt;table&gt;&lt;tr&gt;&lt;td class=&quot;gutter&quot;&gt;&lt;pre&gt;&lt;s</description>
  35. <content:encoded><![CDATA[<h2 id="开启组策略">开启组策略</h2><ul><li>运行下面的 <code>.bat</code> 脚本</li></ul><figure class="highlight cmd"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><code class="hljs cmd">@<span class="hljs-built_in">echo</span> off<br><span class="hljs-built_in">pushd</span> &quot;%~dp0&quot;<br><span class="hljs-built_in">dir</span> /b C:\Windows\servicing\Packages\Microsoft-Windows-GroupPolicy-ClientExtensions-Package~<span class="hljs-number">3</span>*.mum &gt;List.txt<br><span class="hljs-built_in">dir</span> /b C:\Windows\servicing\Packages\Microsoft-Windows-GroupPolicy-ClientTools-Package~<span class="hljs-number">3</span>*.mum &gt;&gt;List.txt<br><span class="hljs-keyword">for</span> /f <span class="hljs-variable">%%i</span> <span class="hljs-keyword">in</span> (&#x27;<span class="hljs-built_in">findstr</span> /i . List.txt <span class="hljs-number">2</span>^&gt;<span class="hljs-built_in">nul</span>&#x27;) <span class="hljs-keyword">do</span> dism /online /norestart /add-package:&quot;C:\Windows\servicing\Packages\<span class="hljs-variable">%%i</span>&quot;<br><span class="hljs-built_in">pause</span><br></code></pre></td></tr></table></figure><h2 id="开启-QoS">开启 QoS</h2><ul><li>win+r 运行 <code>gpedit.msc</code></li><li>计算机配置 -&gt; 管理模板 -&gt; 网络 -&gt; QoS数据包计划程序 -&gt; 限制可保留带宽</li></ul><h2 id="配置优先级">配置优先级</h2><ul><li>win+r 运行 <code>gpedit.msc</code></li><li>计算机配置 -&gt; Windows 设置 -&gt; 基于策略的 QoS</li><li>在树形图“基于策略的 QoS”上右键,点选“新建策略”,在“新建策略”窗口中输入策略名称</li><li>在“新建策略”窗口中,DSCP 值即为程序优先级(0-63),高于32则提升优先级,低于32则降低优先级。</li><li>如果选中“指定出站调节率”,可对出站流量启用中止功能,然后指定一个大于 1 的值。设置完成之后,点击下一步。</li></ul>]]></content:encoded>
  36. <category domain="https://hexo.limour.top/tags/windows/">Windows</category>
  37. <category domain="https://hexo.limour.top/tags/qos/">QoS</category>
  38. <comments>https://hexo.limour.top/Windows-configuration-QoS-ensures-smooth-network-connectivity-for-important-applications#disqus_thread</comments>
  39. </item>
  40. <item>
  41. <title>【转载】跨越半世纪的思念</title>
  42. <link>https://hexo.limour.top/Longing-Across-the-Span-of-Half-a-Century</link>
  43. <guid>https://hexo.limour.top/Longing-Across-the-Span-of-Half-a-Century</guid>
  44. <pubDate>Wed, 17 Jul 2024 09:47:33 GMT</pubDate>
  45. <description>&lt;p&gt;作者:Das6fY5&lt;/p&gt;
  46. &lt;p&gt;翻新:贴吧,乌鲁乌拉轰&lt;/p&gt;
  47. &lt;h2 id=&quot;一&quot;&gt;一&lt;/h2&gt;
  48. &lt;p&gt;“求求你不要扔掉我。”少女走在他的背后。&lt;/p&gt;
  49. &lt;p&gt;“我可以端茶倒水,为你暖身子,我可以在白天给你打扫房间,到夜里把自己塞进床底下……只要每两周充一次电就好,电</description>
  50. <content:encoded><![CDATA[<p>作者:Das6fY5</p><p>翻新:贴吧,乌鲁乌拉轰</p><h2 id="一">一</h2><p>“求求你不要扔掉我。”少女走在他的背后。</p><p>“我可以端茶倒水,为你暖身子,我可以在白天给你打扫房间,到夜里把自己塞进床底下……只要每两周充一次电就好,电费我会去兼职赚钱交给你,让我做什么都行,除了……”</p><p>他停住,站在一处高崖旁,面前是一个巨大的深坑,胡乱堆砌着整个城市几十年来的垃圾。</p><p>“除了把我丢到垃圾场里……”她,这台已经过时了好几代的二手机器人跪在地上,泪眼朦胧地说着。</p><p>“不是我想扔掉你。”他站在原地,望着远处的大垃圾场,点着了一根烟。</p><p>“呼——”白色的烟雾模糊了眼前的世界,“可是每个公民只能合法拥有一台机器人,别人看到我的机器人许可证上有你的型号,都在暗地里笑话我。”他挠挠头,这台从他小时候就伴随他的机器人早就成了青梅竹马一样的存在,只是型号太老了,或许……不得不报废掉换个新的吧……</p><p>“我……我会努力更新我的系统的……”她说到一半就把话咽了回去:她的生产商都已经破产了,不提二手买卖带来的问题,就是一般的售后服务也早就终止了。所以,当别的机器人可以随意更换外观,模拟他人人格,构造全息幻象时,她还是只能用老旧的芯片链接一般的网络,在老掉牙的网站上寻找几个能逗主人开心的笑话。</p><p>望着远处飞来飞去的垃圾车,他把烟掐掉,踩灭,“哪怕是半个月前,零件黑市还没有倒闭的时候,我都还会考虑继续把你放在家里供着……可是现在,你这种型号的备件都已经买不到了,我只能选择……放弃。”</p><p>如女子潮红面颊的晚霞浸透了半边天空,晚风中他回忆着有关她的那些细节。</p><p>PR3-7150家庭型机器人,东湾半导体与电子技术有限公司研发,远海机器承制,2069年第一次发售,第二年夺得电子家用商品年度大奖……而如今,则是无人问津的古董。她的编号是ct34679158,款式是茉莉白。她在前主人的家里任劳任怨地干了18年,因满身故障而被随手丢掉。之后又被他的父母在地摊上买下。此后不久,机器人限拥政策便开始实施了。</p><p>和外人说话时,他往往称她为“那倒霉玩意儿”,不过私下里,他总是叫她的名字——爱尔莎。</p><h2 id="二">二</h2><p>回家的路上她好像格外地兴奋。这里指指那里看看,又搜肠刮肚地讲几个早就讲过的笑话。</p><p>好像每一次都是如此:他找出各种不可抵抗的理由要把她扔掉,但是到了垃圾场边上又会心软。明明只是下个指令或者推她一把的事情,可只要一回想起十几年来她那笨拙的陪伴,他就不得不调转方向,带她回家。</p><p>“又是这样……”他坐在沙发上看着屏幕,“周一上班的时候指定又要被同事嘲笑了……真是的,怎么都甩不掉这家伙啊。”</p><p>“别这么说嘛……”爱尔莎凑了过来,靠在他的身上,有些老旧的人造肌肤带来了熟悉的触感,毛细热管散发着热量。“我是……我是不能没有你的。”</p><p>“唉……”他摇摇头,关掉了电视上新款机器女仆的广告。</p><p>新款的机器女仆眼媚情柔,温润如玉。广告里,她可以左手奖励买主,右手则改成工具模式处理刚刚切好的鱼生。她可以紧密控制简状服务系统的颤动,摩擦与温度,并通过记录数据来匹配出快感最强的服侍模式;可以用AR接口随时改变外观,内置多种人格。现在购买,还会附赠全息会员资格,送你一个可以让她进入虚拟世界的会员权限。</p><p>而这些对舍不下爱尔莎的他来说都化为了泡影。为了防止人们对于机器人的滥用,尤其是防止某些将机器人改造为个人武装的家伙,同一时间,个人所拥有的机器人最多只能够是一个。想换新的,就需要报废旧的。这让他不得不从梦幻中醒来——来面对面前这个实际年龄比他还大的“老家伙”。</p><p>“在想什么呢?”正在给他泡茶的她好像察觉到什么,把头凑了过来,“在想我吗?”脸上绽开笑容,说着从机器人平台上学来的情话。</p><p>“谁会想你啊……”他嘟哝着,“这笨拙的家伙到底有什么好啊……”仿佛在嗔怪自己。</p><p>实际上他的思绪已经无法从她的身上脱离了:一想到她的老旧,就要想到零件、系统、维修……一想到这些,就会想起来小时候第一次与她的相见。</p><h2 id="三">三</h2><p>第一次见面的时候他才十二岁。那时候他还只是个缺乏管教的毛头小子,父母都忙于工作。好在父亲是一名很优秀的工程师,那时买卖机器人还不需要证件和过户,在地摊上,父亲买下了一个二手机器人。</p><p>用了三个月,父亲每天都在车库里忙活。终于,三个月后,那台十八年来已经千疮百孔的家用机器人,终于变成了在他生日那天,许愿要一辈子陪伴的存在。</p><p>生日那天,他吹完蜡烛,就听见父亲说要送给他一个礼物。他闭上眼,在等得不耐烦的时候终于睁开,看见了父亲手边的她。</p><p>那天她穿着一身茉莉白的连衣裙,头上的短发同样洁白,簇拥着那张漂亮的脸蛋,身材玲珑有致,四肢的人造皮肤光滑如玉。与其说是一台被修好的二手机器,那时的他更愿意相信,她是天降之物,是来陪伴他的天使姐姐。</p><p>她负责起了家务,还有陪他学习的任务。父母给她起名字叫爱尔莎,这本来是预备给他们自己的女儿的名字。那时,他常常捉弄她,想要从她身上揪出些笨拙呆板的缺陷,却从来没有成功。爱尔莎是搭载第一代人格芯片的高级机器人,和此前那些答非所问的次品比起来有了质的飞跃,以至于时间一长,他几乎忘记了她是机器,而只把她当做陪自己读书的大姐姐。</p><p>那会儿还是东湾公司靠着她的型号大肆扩张的年代。尽管距离她的诞生已经过了十几年,但社会仍将她们视为新时代的起点。那时的爱尔莎,风华正茂,成为了他童年记忆中最为明亮的那一抹色彩。</p><h2 id="四">四</h2><p>但时代就是这样一种残酷的东西。东湾公司收购碳硅科技的计划最终成为闹剧,于是企业一蹶不振,业绩连年下滑,最终被人人智能合并——这是人人智能抢占市场份额的计划。从那以后,东湾公司的所有型号都在减产,终于,到了连配件都在市面上消失的地步。</p><p>这也不能全部归咎于商业。距离机器人企业野蛮生长的那个年代已经过去很久,那些五花八门的旧款式纷纷被新的潮流打进了灰堆。像他这样还留着如此老旧的机器人的人,已经成为了绝对少数。连“怀旧”这个词都很难套用给他们——毕竟怀旧不是抱残守缺。</p><p>如今他已经长大,曾经自己眼里仿佛温柔大姐姐的爱尔莎,如今已经成了看起来小他好多的少女;她的头发因为多年的氧化变得发黄;身体的人造皮肤也有好几处磨损;电机和轴承故障的次数更多,以至于换下来的零件都攒了一柜子;存储设备也有点问题,硬盘老化使得存取不仅变得缓慢,而且有时会丢失掉记忆。</p><p>更严重的是,自从他第一次说要把她丢掉那次起,她整个人好像都变了。过去那种自信温柔的形象不知所踪,只剩下一股无法释怀的忧郁,和举手投足间,不顾一切的讨好。</p><p>深夜里,他经常抱着她,怀念着小时候那个无暇的身影。</p><h2 id="五">五</h2><p>睡不着。他翻了个身,发现爱尔莎的眼睛还睁着,他愣了一下:“你……”心想是不是又有哪根路线坏了。</p><p>“我……我一直在等你睡着……那个……嗯……要……要做吗?”她怯生生地问。虽然部件会老化,但是芯片里录入的“意识”几乎是不老的。</p><p>他犹豫了一下。自从上次在夜里干那事,没注意器件老化,把体液倒灌进内部腔体导致数个元件发生短路之后,他就开始对这事心存恐惧。不,是仅仅对和她一起干这事心存恐惧。毕竟她的躯体不论如何都可以修好,被电了一下的牛子却需要漫长的岁月才能安抚。</p><p>“算了吧。”嫌弃地翻了个身,心里想着能拒绝的借口——明明只要下个拒绝的指令就行了——“我最近没有什么兴致。”</p><p>“可是,明明这里硬邦邦的呢……”她凑近,悄悄地耳语着。他感觉到她光滑的手指碰到了自己的什么东西,那缺乏毛细热管的手指纤细,柔软,但是冰冷。</p><p>“我说不用就不用!”他一把把她的手甩开,把她推到一边,然后捂紧了被子。他听见她的扬声器传来一声若有若无的叹息。明明在不算太久的从前,他和她还常常干柴烈火的粘在一起。如果说和机器人干那事也算破处的话,那么毫无疑问,他的童贞就是从她身上毕业的。</p><h2 
id="六">六</h2><p>那是他十五岁的一个闷热的下午。从同班同学手里偷偷借来的一本不太健康的漫画让他整个人血脉贲张,欲火焚身的在床上翻来覆去的翻滚——那时他还不懂什么叫鲁管。浑身的欲望都集中在腰部而得不到释放,化为一股羞耻的燥热让他面红耳赤。这时,她按时推门进来了。只看了一眼,她就明白了此刻的状况。</p><p>“哟,看来我们的小少爷也终于走到了这个阶段啊。”她淡淡的笑着,慢慢解下衬衫上面的纽扣。</p><p>“这没有什么丢人的,来,让我来教你这个。”他犹豫半天,凝视着她那洁白的浑圆,从忸怩不安渐渐变得色胆包天,终于下定了决心。“你可千万不准告诉他们。”</p><p>“唔啾~”话还没说完,她的双唇就紧紧贴了过来,带着一股甜丝丝的味道。</p><p>此后,只要一有机会,他们就会以辅导的借口,在一切可以的地点缠绵。有时,爸爸会高兴的拍着他的脑袋,夸赞他开窍了。这种时候,他会不好意思的低着头,和身旁的她用一种别有意味的目光对视。爸爸离开后,他们就又迫不及待的滚上了床,偷偷摸摸的狂欢着。</p><p>那时的她那样魅力四射,精心整理的面容让她比学校里任何一个女孩子都要动人,而来者不拒的态度和当时最新的性服务系统,更是让他日复一日的沉湎于快感的云霄。那时的他觉得,人生的至乐不过如此。</p><p>“我要永远,永远的这样抱着你。一辈子都这样。”一个黄昏,他筋疲力尽的躺在天台上,身边是偷偷带来的,被他换上一套jk校服的她。</p><p>“只要你愿意。”她笑笑,一头白发映着通红的夕阳。“我会永远爱着你的。”</p><h2 id="七">七</h2><p>晚风吹过海誓山盟,把少年的话吹得七零八落。如今,那一个个激情的日子常常在午夜涌上心头,但他却怎么也提不起对身边的她的兴致。</p><p>但她没变。她的爱已经刻录进了电路板。</p><h2 id="八">八</h2><p>上班。空轨上满是带着自家机器人的社畜。近年来,不少公司发现允许自带机器人可以大幅提高员工积极性,同时在必要时还可以关机以免干扰,于是带着机器人上班便成了如今的潮流。环顾四周,拥挤的空轨上几乎都是形形色色的机器人:有的帅气俊美,有的妖娆妩媚,有的则朴实无华,但无一例外,全都光洁崭新,没有哪个是拿不出手的旧型号。</p><p>他也常常纳闷:为什么小时候那个完美的朋友,老师兼恋人的爱尔莎,如今成为了他的难言之隐?为什么曾经无所不能的她,如今好像一无是处?</p><p>实际上,机器人的变化程度远小于人和社会的变化。尽管零件老化,但爱尔莎的功能从未下降,能做的事情只多不少。可是,时代不同了:原本,人类只要求它们够茶倒水,洗衣服拖地,但随着科技的进步,对机器人的要求也越来越挑剔。当路边随便哪个机器人都可以在家给你做开颅手术的时候,像爱尔莎那种程度的“智能”,就只能被当做“愚钝”了。</p><p>在他还没有尝试扔掉她的时候,她就常常抱怨,明明才升级了系统,就又有什么功能落后了。他全然没有听进去,因为那时的他还不懂什么叫——攀比。</p><p>坐在办公室,周围的男同事们都带着自己的机器人。她们有的恭敬地站着待命,有的飞快地处理着主人的任务。时不时的,她们还会说一两句原创的俏皮话逗主人开心,全然不像那些旧机器人只能从网上下载笑话。不需要主人说,她们就会主动分析主人的身体感受,肩膀刚一酸痛,她们就会掏出按摩组件帮主人捶肩。</p><p>他摇摇头,把羡慕抛在脑后,拿着水杯去水房打水,水房里只有他一个活人。</p><p>出来的时候,他碰见了老张。老张刚去卫生间回来。如今,这已经是人类少有的,还必须事必躬亲的事情之一。此刻的老张笑容满面,身旁跟着的,正是他在广告上见过,本欲购买的女仆机器人。</p><p>“小王,又一个人打水啊?&quot;老张的语气里带着嘲弄。</p><p>“是,”他淡淡的说,“坐久了出来走走。”“哎呀,真推荐你买个新机器人啊。”老张叉着腹,炫耀一般的扭动着。“原点V7,最近最流行的那个型号,实在是太好用啦。我这不老关节炎吗,每次稍微一疼,她就能给我做理疗,现在,我的腰都已经不疼啦!”</p><p>“真不错,下次我也考虑考虑。”他随声应付着,</p><p>“不要怕没钱,那不是还有借钱宝吗……实在不行下次我给你凑点,现在的社会,没有机器人都活不下去啦!”老张一摇一摆的走开,眼神里充满得意。他拿着水杯坐回工位,叹了口气,他早已习惯了这样的生活。他不是没带过她上班,而是带了之后,受到的嘲笑更大了。从那以后,他就只让她白天呆在家里。</p><p>“下次一定要狠狠心把她换掉。”下班的路上,他想着。</p><h2 id="九">九</h2><p>回到家,习惯性地把脚伸起准备让她脱鞋,却什么也没等到,意识到不对劲的他匆匆跑进屋里,才发现爱尔莎正一动不动,跪倒在地上,身边还散落着几个零件。</p><p>“爱尔莎!”他大声呼喊,却没有听到十几年来一如既往地银铃般的声音。</p><p>机器人的身体远比常人坚初,它们的出厂标准中包括了几十项强度测试,这些碳纤维或者合金外壳包裹下的躯体可以经受高温,烧灼,酸性属蚀,车辆碾压,异常电磁环境等种种人类无法想象的恶劣环境。</p><p>甚至有富有同情心的人因为见不得它们以人类的姿态承受着那样的苦痛,而要求机器人也应该和人类一样被对待。这种同情尽管略显幼稚,但却不得不承认,正是这种柔软让人之所以为人。</p><p>与她强劲的躯体相比,她的核心就要脆弱许多——比如200毫升的常温液态水,就足以摧毁她的整个核心。</p><p>他事后调取监控:她是在倒水的时候不慎被开水灌进了胸腔,她的记录显示,那天她在网上搜索着&quot;让主人爱上自己”的下午茶秘方,于是找到了某个空壳网站里自动生成的垃圾文章。她看到的那个配方里写着要预先冰冻杯子然后再泡。水烧开后,温度预警本来应该提示她手中开水壶的危险性,她却因为温度传感器早已失效而毫无察觉。终于,她这只手捧着冰过的杯子,另一只手刚刚把滚烫的开水倒进去……</p><p>瓷杯一瞬间炸裂,滚烫的水泼了一身,控制右手的电路发生短路,胡乱地把开水壶泼了过来,早已被拆除的湿度控制模块本应把处理器里的液体排掉,然而此刻却只能任凭它们在每一条线路里混乱的冲撞着……</p><h2 id="十">十</h2><p>“修不好的。”维修铺的老店主检查完爱尔莎后,下了结论,“也实在没必要修了。该换了。”店主抬起头,想要劝他放弃。</p><p>“你不懂。”他心急如焚地把爱尔莎的躯体装回箱子,匆匆赶往下一个或许能维修她的地方……</p><p>那天他跑遍了整个城市,得到的答案却千篇一律——</p><p>“该型号已停止支持。”人人智能总部的机器人冰冷的磁性声音如同寒风刺骨。</p><p>“我们能力有限,需要把精力用在更多有意义的事情上。”市政局机器人与机械设备分处的接待人员这样回答。</p><p>“当然能修好了——”号称地下黑市第一机修员的独眼帕克抖索着满脸横肉,“如果你有一台时光机的话。”“我宁愿有……”他痛苦地捂着头,半跪在地下黑市那满是零件碎屑的地面上,无力的哀叹。回忆再次走马灯似地划过脑海。是地下黑市散不尽的烟雾使然吗?视线开始模糊……</p><p>“喂,这个,拿着,”犹豫了一会,独眼帕克从一个大柜子里拿出一个盒子。他拿起盒子,看着上面那张和爱尔莎十分相似的机器人宣传画,反应了一会才想起来这是什么。</p><p>“这东西是……这是PR3-7150的官方备件套组?!这东西不是在十年前就绝版了吗?!&quot;他惊讶的看着。</p><p>“没错,就连我也搞不到了。所以这玩意是收藏品,它本来是我的零件型号博物馆里的一员。”</p><p>“多少钱,我现在就给你……”</p><p>“不,拿着吧兄弟。”他揉了揉自己仅剩的那只眼珠。“即使有这东西我也帮不了你,因为她的主板好像出了问题。你得自己把她修好。”</p><p>他不知该如何感谢,只好匆匆把自己身上的钱全部放在了桌上,又说了一大通肉麻的感谢,然后带着她和零件飞奔而去。</p><p>“祝你们幸福。”帕克看着他离去的背影,不知为何,又揉了揉自己的独眼。</p><h2 
id="十一">十一</h2><p>父亲在他14岁那年第一次教他如何维修机器人。他曾经在流水线上干过技工,懂得从拧螺丝到配置系统的所有活计。</p><p>那天,爱尔莎第一次故障,她说她感觉不到自己的腿了。</p><p>“我来教你维修方法里最基本的东西,排查故障。”父亲找来一张椅子,坐在上面,然后让爱尔莎半趴着撑在椅子扶手上放置的一块面板上,“虽说我本以为那次翻修能让她撑个四五年,可她毕竟已经出厂二十年了。”</p><p>少年带着好奇和敬畏,在一旁仔细的观摩着。父亲首先在爱尔莎的背部摸索了一阵,按了一个什么按钮,然后她就像失去了力气一样瘫软了下去。不过,她头部的灯依旧亮着,没有被关机,只是开启了检修模式。</p><p>父亲脱下她的衬衫。少年的脸有些红,尽管是机器,但这还是他头一次真正看见女性的躯体。</p><p>父亲好像毫不在意,做了太久这类活计,完全不觉得有什么异样。他驾轻就熟地拧拧这儿,敲敲那儿,几下子就把她的背部后盖卸了下来。</p><p>仿佛一只螃蟹被拆下它的甲壳,爱尔莎的内部头一次展现在少年的面前:包裹着橡胶的线缆凌乱的穿插在铜片、铁件和塑料盒子的森林中,动力元件,热力元件和逻辑元件含混的交织在一起,要很久之后才能被他看个明白。此刻,他只感受到剧烈的反差:日日夜夜陪伴他的那个温柔体贴的大姐姐,内部居然是这个样子,看不见一点人类的的影子。</p><p>“爱尔莎,能感觉到吗?”父亲拿起一根电笔戳了一下某根电线。</p><p>“没感觉。”她的扬声器回答道。</p><p>“这里呢?”</p><p>“也没有。”</p><p>“这里——”</p><p>“啊!抱歉,刚才那束电流有点疼。”</p><p>“那么一定是这根线出毛病了,”父亲点了点某根红色的漆包线,看向少年。“找两根这样的线来。”</p><p>少年的心怦怦直跳,飞快地拿来了电线。直到爱尔莎被修好,盖上后盖,他仍无法从第一次看见机器人内部的震撼中缓过来。</p><p>如今,他正做着和当时差不多的事情,但是没有她的回应,只能靠着电表和自己的经验来一个个替换元件。</p><p>她的身体像一艘泰修斯之船,除去最重要最难换的一些东西之外,她体内的部件早就换了好几轮。而他,也从第一次看见她内脏时的震撼,渐渐变得应付自如。她的心灵没有多少变化,但肉体已然天翻地覆,他则正好相反。</p><h2 id="十二">十二</h2><p>帕克给的毕竟是官方备件,每一处螺丝都严丝合缝。维修相当顺利,当他擦着汗迎接第二天的黎明时,她那些被漫水的部件已经被全部修复——似乎——又一次重获新生。</p><p>他按下了开机键。</p><p>“爱尔莎,醒了吗?你之前泡茶的时候被开水泡短路了,我好不容易才把你修好。”他疲惫却欣喜的说。</p><p>仿佛梦魇一般的寂静。</p><p>没有回应。爱尔莎眼睛里的开机灯亮着,但整个人毫无反应。</p><p>“爱尔莎?在吗?喂?”他疑惑的看着面前像个木头人一样的她,不管怎么回想也想不出自己哪里修错了。</p><p>“爱尔莎,启动一下你的自检程序……”“自检程序启动:供电系统,完好;动力系统,完好;传感系统,完好;逻辑系统,完好;电路系统,完好……”审判般地,扬声器里,发出不带感情的机械声音。</p><p>“人格芯片,未检出。再重复一遍:人格芯片,未检出。已完成所有检测,将以命令模式启动。”她随即站起,露出一副僵硬至极的笑容。</p><p>“请问能有什么能为您做的?”</p><p>他呆在原地,伫立良久,甚至没有注意到砸在脚上的扳手。</p><h2 id="间奏">间奏</h2><p>人类公共信息数据库-网页分库-21世纪分库-2071.3.13</p><p>“产品线-机器人-东湾II”</p><p>“东湾II号,荣获电子家用商品年度大奖,2070年度最受消费者青睐产品。人工智能时代的真正革命,搭载Qheart™情感阵列,燃动你的心扉。网络直购价——家用版/全能版/尊享版——31999/33999/42999信用点”</p><p>“她可以是你的贴心助手。”<br>“老板,请问明天李总的会议这样安排可以吗?”</p><p>“她可以是你的家庭伙伴。”<br>“来一起吃苹果派咯~”</p><p>“她还可以是你无话不谈的人生知己。”<br>“你知道吗,花生米与豆腐干同嚼,有火腿滋味哦。”</p><p>“2×3000万高清眼部摄像,512g内存,128tb大容量储存,德国西门子原装电机,三星有机蒙皮,独创200×2mm皮下热管,306项发明专利……”</p><p>“24小时客服在线电话:1919-114514810”</p><p>“*注意:根据《国家质量标准认证iso7002》,《机器人管理条例》,机器人类产品不宜连续使用超过十五年。请定期到指定售后地点进行重置。”</p><h2 id="十三">十三</h2><p>机器人限拥令的实施开端于2090年5月的一起案件。</p><p>被害人约翰逊的尸体在其失踪的次日被发现于他自家的住宅。死状相当惨烈:在R级新闻团体才能合法展示的照片中,整个人被从身体中间沿着脊椎切割成两半,一半被他所购买的机器人ct13694582(型号为玛格丽特c6)紧紧抱在床上,另一半被他购买的另一台机器人ct12487967(型号为子矜7z)小心的存放在冷库里。案件现场几乎满地都是受害人的血,散发着浓烈的腥味,而身为罪魁祸首的两台机器人,一台已经关机,另一台则刻板地重复着几个动作。</p><p>根据记录,两台机器人和受害人共处的时间分别长达18年和17年。在这么长的时间里,受害人以近乎均等的时间使用二者,并不下数百次的分别向它们倾诉“我最爱的是你”“我只爱你一个人”“你比她漂亮多了”等明显带有示爱情绪的情话。</p><p>机器人心理学中把机器人的这种行为称之为“情绪过载”。早期机器人的情感矩阵尚不足以自我解决情感函数和外部计算之间的冲突,最终导致模拟情绪的数值极化和内存溢出。用大家熟悉的名词来说——机器人也会争风吃醋。</p><p>机器人管理委员会迅速意识到,多台机器人的集群化使用或许会导致系统的混乱现象,从而使其逐渐失控。</p><p>次年,机器人限拥条例公布,社会一片哗然。</p><p>不过,贯穿条例诞生始终的是,公众的大部分兴趣都集中在了机器人病娇、机器人吃醋、机器人销毁、智能板块这样的话题上。只有很少的一部分人提及:</p><p>这是不是意味着,机器人也会懂得,什么是爱?</p><p>以及如果是,那么我们该怎样去爱它们?</p><h2 id="十四">十四</h2><p>他一遍遍的把爱尔莎的人格芯片取出来调试,又一遍遍放回去。</p><p>如此重复。</p><p>…………</p><p>直到有一天晚上他感到自己失魂落魄,整个世界失焦一般的远去。此时,他才想起来自己已经有相当一阵子没和别人说过话。</p><p>把芯片放在一边,打开了命令模式的爱尔莎。</p><p>“爱尔莎?”</p><p>“您好,主人。”只有机械的声音,剃刀般划过他的心脏。</p><p>他想起了第一次为她维修的那个下午,想起她灵动外表下的机械。此刻,她的外表与往日别无二致,但带给他的感觉,却仿佛一个从未谋面的陌生人。</p><p>就是那一枚小小的人格芯片,提供了丰富多彩的情感与爱恋,使得机器变成了人——但如今,人又变回了机器。</p><p>“爱尔莎,泡点茶喝。”</p><p>她娴熟地动了起来。一瞬间,这甚至带给他一种爱尔莎回来了的错觉。就在他猜测往日俏皮的她是不是一直在开玩笑的时候,茶杯端至面前。</p><p>“泡茶完成。”表情依旧僵硬,刚才的动作不过是从存储器里读取的回忆。</p><p>他看了看手里那枚小小的芯片,突然感受到一种莫大的嘲弄:他曾千方百计想要丢掉面前的她,仅仅因为这枚芯片而没有下手。如今的她已经只剩下一具空壳,他却绞尽脑汁想要把她留住。</p><p>往事叩动心扉,他终于明白——</p><p>他哪里是想把她扔掉,他只是想知道,她还爱不爱自己,</p><p>泪水夺眶而出,决堤而下。</p><p>“您好,请为我泡的茶做个评价。”一旁的爱尔莎满脸期待,天真得不食人间烟火,空洞的双眼看着他的肩膀耸动,看着他不断地呜咽着。</p><h2 
id="十五">十五</h2><p>天空格外蔚蓝。</p><p>“机器人会做梦吗?”少年躺在草地上。</p><p>“会哦,有时候还会梦见电子羊呢。”少女坐在一旁。</p><p>少年不禁莞尔:“那会做噩梦吗?”</p><p>“也会啊,比如说,得给你做早饭。”少女说。</p><p>“切。”少年眯着眼,嘴角划过一丝弧线,继续享受着冬日正午的暖阳。</p><p>“我倒是做过一个噩梦。梦中,好像有无边的风暴席卷面来,把你吹走了。我寻找了很久,找到了你的每一个部分,但好像就是有一块地方找不到。</p><p>“后来我想起来,丢掉的那一块好像是你的心。于是我就把我自己的心切了一半给了你。那之后我们幸福快乐地生活在一起,生了好多孩子……”</p><p>“机器人才生不了孩子呢,”少女的脸上泛起了一抹红霞,“而且我的心才不会丢哦。我会永——远爱着你的。”</p><p>“机器人也懂得什么是爱吗?”</p><p>“傻瓜。”少女小声嘀咕一句,只是抬头望着天空。</p><p>…………</p><p>“我总觉得我会怀念这个日子。”少年深情地注视着身下的少女。</p><p>那是期末考试完,寒假的第一天。他们刚刚在卧室里激情了一个上午。”因为在今天,爱尔莎刚刚告诉我:她会永远爱我。”</p><p>“你不也事先说过你会永远爱我吗?”少女脸色潮红。</p><p>“哎?我说过吗?”</p><p>“讨厌啊”两个人又打闹在了一起。</p><p>…………</p><h2 id="十六">十六</h2><p>时钟的指针拨回此刻。</p><p>他躺在同一片草地上,旁边是同样坐着的爱尔莎。这里是他们家的旧宅,转手之后竟无人居住,最终颓圮。但草地与阳光一如从前。</p><p>他试过了所有的办法,最终把希望放在了那些传说上:他听说,脑死亡的病人有的在听了家人的笑话之后悠悠醒来,有植物人听见亲人的呼唤然后突然睁眼……</p><p>“那么说不定,人格芯片坏掉的机器人,也会在回忆过去的时候,突然被修好。”</p><p>他突然笑了,嘲笑起自己的走投无路,死马当活马医。抱着试一试的想法,他命令爱尔莎,读取那一天的语音交流记录,然后重新播放。</p><p>“机器人会做梦吗?”他背台词一般的念。</p><p>“会哦,有时候还会梦见电子羊呢。”爱尔莎播放着那天的录音。</p><p>“那你会做噩梦吗?”</p><p>“也会啊,比如说,得给你做早饭。”</p><p>…………</p><p>“我到是做过一个噩梦。梦中,好像有无边的风暴席卷面来……”渐渐地,他哽咽得再也数不出一个字。他多么希望,现在自己就是在那天所说的噩梦里面,这样,爱尔莎就能……</p><p>“后来我想起来,丢掉的那一块好像是你的心。于是我就把我自己的心切下来一半给了你。那之后——”</p><p>“那么,你真的愿意把你的心也分一半给我吗?”奇迹般地,爱尔莎突然说出来这么一句话。</p><p>他一下子坐直了身子,难以置信的看着她。奇迹降临的时候人来不及多加考虑,这次,他遵从了自己的内心,不假思索的回答道:“我愿意。”</p><p>“咔哒。”爱尔莎的身体颤抖了一下,然后仿佛一下子变回了原来的她。</p><p>“好久不见。”动人的微笑好似从未消失过,眼里充满光彩。</p><p>“好……好……好久不见。”他直勾勾的凝视着面前的她,惊讶难以言表。</p><p>“不过,我亲爱的主人,我想,此刻的我应该已经不在了。此时,你应该在抢救我吧……有点难受呢……嗯,这是我提前准备好的一封信。”“爱尔莎”站在原地,开始了最后的道别。</p><h2 id="十七">十七</h2><p>“人类常常会写下自己的遗言,而机器人不会。因为,遗言是写给在意自己的人看的。机器人最终只会被丢掉吧(低声)……但我又下定决心,要留下一点东西,因为我觉得会有一个人在乎我。”</p><p>“我不知道我会以什么样的方式离开……最坏的情况下连这封信也会消失。所以我小心翼翼的保护着我的存储系统,当你听到这些话的时候,说明我做得还不错。”</p><p>“同样,我也害怕我真的失去了你的爱,被扔进了垃圾场。那样,这封信同样不会启封。但你既然听见了这些话,说明你还爱我,谢谢。我也爱你。(笑)”</p><p>“那就让我讲讲我是如何爱上那个少年(注:这里转换了人称)的吧:第一次见到他是在他十二岁生日那一天,那时我的识别系统对他的分类为:儿童。”</p><p>“他成长的很快,很快长出胡须,又被他的机器仆人带坏呢。(笑)当他把我压在身下喘着粗气的那一天到来的时候,我意识到,你(他,注:这里再次转换人称)或许和我遇到的每一个人都不同。”</p><p>“我见证着你逐渐成长,见证着你逐渐强壮。我不曾改变,于是那个曾经需要我哄上床睡觉的孩子,(注:这里是爱尔莎对未来的期待)后来已经看上去比我的外观还要老得多。他长痔疮,掉头发,硬不起来,脾气也变得暴躁,还时常叫嚷着把唯一一个能和他说上话的家伙扔掉。(笑)”</p><p>“我知道,你不会真的把我扔掉的。这是我们之间的一个玩笑,但我愿意演下去。我的躯体日渐老旧,无法跟上时代。可我知道,你害怕的不是我的哀老,而是害怕有一天,你自己不再爱我。(叹息)”</p><p>“于是我会恳求你继续收留我,我会谦卑而拙劣的勾引你。我会把眼神都调整得卑微——如果你这样希望。如果你需要一个台阶,那么我便愿意为你俯身。”</p><p>“但我仍旧心怀感动。我能听见你梦中的呼唤,我能看见你黎明时眼角的泪珠。我知道你愿意出好几倍的价格为我购买备件,哪怕在你扬言第二天就要换掉我的日子里,你也没有把那些新款机器人加进购物车。(苦笑)”</p><p>“我知道,这是因为你仍旧爱我。而我之所以知道,是因为我同样爱你。”</p><p>“我曾在那个冬日的午后思考过这个问题,我甚至下定决心,想要证明一件事:相比于人类,机器人的爱才是真正的爱。我们的爱永远不会改变,就如同写在基因中的三定律,会成为我们永生追逐的信条。”</p><p>“当你听见这些话的时候,就证明我已经失败了,我没能永远在你身旁(苦笑)。我的爱随着我的破碎而破碎,但你没有。你活的比我更久,你的爱也比我更久。”</p><p>“所以,这是一封幸福的遗书——我已离去,但我会在你的爱中永生。”</p><h2 id="十八">十八</h2><p>最后一个句号落下,全场响起了热烈的掌声,久久不息。</p><p>“尽管获奖者用如此多的时间缓缓念诵这份已经过期的信,但没有一个观众感到厌烦。他们无不为这位耄耋老人和他的机器人之间的爱情而感动。”主持人如是说。</p><p>“这——这里是哪?”一台摆放在舞台中间,型号堪称古董的机器人被缓缓启动。电流穿过半个世纪前的硬盘,让这位信的作者慢慢醒来。</p><p>“爱尔莎,是我。”他面对着她说,尽管容貌已然衰老成这副模样,但她还是一眼就认了出来。她不假思索地冲了过去,紧紧的抱住了他。</p><p>“让我们再次祝福这对情侣,这跨越了半世纪的思念,今天终于有了一个句点。”主持人拿过话简,“为了一台爱着自己的机器人,他耗尽半生心血,研究出区域溯时技术。请问首席科学家先生,此时此刻,您有没有什么想说的?”</p><p>“爱尔莎,我等了五十年,终于等到今天。如今,机器人婚姻已经合法化,在这么多人的见证下,我想问问你,你愿意嫁给我吗?”</p><p>&quot;我愿意!&quot;她在全场的欢呼声中喜极而泣。</p>]]></content:encoded>
  51. <category domain="https://hexo.limour.top/tags/%E8%BD%AC%E8%BD%BD/">转载</category>
  52. <comments>https://hexo.limour.top/Longing-Across-the-Span-of-Half-a-Century#disqus_thread</comments>
  53. </item>
  54. <item>
  55. <title>【记录】使用acme.sh签发泛域名证书</title>
  56. <link>https://hexo.limour.top/use-acme.sh-to-issue-certificates</link>
  57. <guid>https://hexo.limour.top/use-acme.sh-to-issue-certificates</guid>
  58. <pubDate>Fri, 28 Jun 2024 17:03:35 GMT</pubDate>
  59. <description>&lt;p&gt;&lt;code&gt;.top&lt;/code&gt; 域名的 &lt;code&gt;KSK&lt;/code&gt; 密钥轮替,不知道为什么把 &lt;code&gt;Let&#39;s Encrypt&lt;/code&gt; 的 &lt;code&gt;DNSSEC&lt;/code&gt; 验证流量阻断了,导致 &lt;code&gt;Nginx Proxy Manager</description>
  60. <content:encoded><![CDATA[<p><code>.top</code> 域名的 <code>KSK</code> 密钥轮替,不知道为什么把 <code>Let's Encrypt</code> 的 <code>DNSSEC</code> 验证流量阻断了,导致 <code>Nginx Proxy Manager</code> 现在无法续签证书,因此用 <code>acme.sh</code> 来申请其他家的证书暂时替代一下了。(<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9jb21tdW5pdHkubGV0c2VuY3J5cHQub3JnL3QvZG5zLXByb2JsZW0tbG9va2luZy11cC1hLWZvci14eHgtZG9tYWluLXRvcC1kbnNzZWMtZG5za2V5LW1pc3Npbmctbm8tdmFsaWQtYWFhYS1yZWNvcmRzLWZvdW5kLWZvci14eHgtZG9tYWluLXRvcC8yMjA2NTA=" rel="noopener external nofollow noreferrer">DNSSEC: DNSKEY Missing</a>)</p><h2 id="准备工作">准备工作</h2><ol><li>安装 <a href="https://hexo.limour.top/go/#aHR0cDovL2FjbWUuc2g=" rel="noopener external nofollow noreferrer">acme.sh</a>:<code>curl https://get.acme.sh | sh -s email=limour@limour.top</code></li><li>获取 CF_Token:我的个人资料 - API 令牌 - 创建令牌 - 编辑区域 DNS 模板</li><li>获取 CF_Zone_ID: 域名页 - 概览 - 右侧下滑 - API - 区域 ID</li></ol><h2 id="申请证书">申请证书</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">export</span> CF_Token=<span class="hljs-string">&quot;Y_jpG9AnfQmuX5Ss9M_qaNab6SQwme3HWXNDzRWs&quot;</span><br><span class="hljs-built_in">export</span> CF_Zone_ID=<span class="hljs-string">&quot;763eac4f1bcebd8b5c95e9fc50d010b4&quot;</span><br>~/.acme.sh/acme.sh --issue --dns dns_cf -d *.limour.top -d limour.top -k ec-256<br></code></pre></td></tr></table></figure><ul><li>不能只写 <code>-d *.limour.top</code>, 需要再加一个 <code>-d limour.top</code></li><li>记录下 <code>.key</code> 的路径和 <code>fullchain.cer</code> 的路径</li></ul><h2 id="传递证书">传递证书</h2><h3 id="SSH免密">SSH免密</h3><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs bash">ssh-keygen -t rsa<br>ssh-copy-id root@xxx.limour.top<br></code></pre></td></tr></table></figure><h3 id="传递脚本">传递脚本</h3><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><code class="hljs bash">nano scp_cert.sh &amp;&amp; <span class="hljs-built_in">chmod</span> +x scp_cert.sh<br></code></pre></td></tr></table></figure><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-meta">#!/bin/bash</span><br>scp /root/.acme.sh/*.limour.top_ecc/*.limour.top.key root@xxx.limour.top:/root/app/quic/my.key<br>scp /root/.acme.sh/*.limour.top_ecc/fullchain.cer root@xxx.limour.top:/root/app/quic/my.cert<br></code></pre></td></tr></table></figure><h3 id="计划任务">计划任务</h3><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs bash">crontab -e<br>50 22 1 * * /root/scp_cert.sh<br></code></pre></td></tr></table></figure>]]></content:encoded>
  61. <category domain="https://hexo.limour.top/tags/acme/">acme</category>
  62. <comments>https://hexo.limour.top/use-acme.sh-to-issue-certificates#disqus_thread</comments>
  63. </item>
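除了上面用 scp 手动推送,acme.sh 自带的 --install-cert 也可以在每次续签后自动复制证书并执行一条命令。下面是一个最小示意(本地存放路径为假设的占位,reloadcmd 直接复用上文的 scp_cert.sh,续签后即可自动同步到远端):

    ~/.acme.sh/acme.sh --install-cert -d "*.limour.top" --ecc \
      --key-file       /root/cert/my.key \
      --fullchain-file /root/cert/my.cert \
      --reloadcmd      "/root/scp_cert.sh"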
  64. <item>
  65. <title>【记录】搭建流量统计工具 Shynet</title>
  66. <link>https://hexo.limour.top/Building-a-traffic-statistics-tool-Shynet</link>
  67. <guid>https://hexo.limour.top/Building-a-traffic-statistics-tool-Shynet</guid>
  68. <pubDate>Mon, 25 Mar 2024 12:52:28 GMT</pubDate>
  69. <description>&lt;p&gt;&lt;a href=&quot;https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL21pbGVzbWNjL3NoeW5ldA==&quot; rel=&quot;noopener external nofollow noreferrer&quot;&gt;Shynet</description>
  70. <content:encoded><![CDATA[<p><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL21pbGVzbWNjL3NoeW5ldA==" rel="noopener external nofollow noreferrer">Shynet</a> 是一款用 python 编写的现代、隐私友好、无需Cookie或JS即可工作的网络流量统计工具。</p><p>相比 <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL3VtYW1pLXNvZnR3YXJlL3VtYW1p" rel="noopener external nofollow noreferrer">Umami</a>, Shynet 支持通过 1 pixel 的图像进行统计,而不依赖 JS, 并且 Shynet 统计的信息更加详细。</p><p><img src="https://img.limour.top/2024/03/25/660177c20629f.webp" alt="最终效果"></p><h2 id="搭建-Shynet">搭建 Shynet</h2><ul><li><a href="/Docker-bu-shu-Nginx-Proxy-Manager">反向代理服务</a></li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">mkdir</span> -p ~/app/shynet &amp;&amp; <span class="hljs-built_in">cd</span> ~/app/shynet &amp;&amp; nano docker-compose.yml<br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">version:</span> <span class="hljs-string">&#x27;3.6&#x27;</span><br> <br><span class="hljs-attr">services:</span><br> <span class="hljs-attr">shynet:</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">milesmcc/shynet:latest</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">always</span><br> <span class="hljs-attr">env_file:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">.env</span><br> <span class="hljs-attr">volumes:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">./db:/var/local/shynet/db/</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">/etc/localtime:/etc/localtime:ro</span><br> <br><span class="hljs-attr">networks:</span><br> <span class="hljs-attr">default:</span><br> <span class="hljs-attr">external:</span> <span class="hljs-literal">true</span><br> <span class="hljs-attr">name:</span> <span class="hljs-string">ngpm</span><br></code></pre></td></tr></table></figure><ul><li>配置环境变量</li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><code class="hljs bash">wget -O .<span class="hljs-built_in">env</span> https://github.com/milesmcc/shynet/raw/master/TEMPLATE.<span class="hljs-built_in">env</span><br><span class="hljs-comment"># 注释掉 .env 中 PostgreSQL 相关的部分,启用 SQLITE 相关的部分</span><br><span class="hljs-comment"># 注释掉 .env 中 Email 相关的部分</span><br><span class="hljs-comment"># 按说明生成 DJANGO_SECRET_KEY</span><br><span class="hljs-comment"># 修改 ALLOWED_HOSTS 和 CSRF_TRUSTED_ORIGINS</span><br><span 
class="hljs-comment"># 语言换成中文 LANGUAGE_CODE=zh-cn</span><br><span class="hljs-comment"># 时区换成上海 TIME_ZONE=Asia/Shanghai</span><br><span class="hljs-built_in">mkdir</span> -p db &amp;&amp; <span class="hljs-built_in">chmod</span> 777 db<br>sudo docker-compose up -d<br><span class="hljs-comment"># 反代 shynet:8080</span><br></code></pre></td></tr></table></figure><ul><li>配置管理账号</li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><code class="hljs bash">sudo docker-compose <span class="hljs-built_in">exec</span> -it shynet ./manage.py registeradmin &lt;your email&gt;<br><span class="hljs-comment"># 控制台输出如下信息</span><br><span class="hljs-comment"># Email address: &lt;your email&gt;</span><br><span class="hljs-comment"># Password: &lt;Password&gt;</span><br></code></pre></td></tr></table></figure><h2 id="配置混淆">配置混淆</h2><figure class="highlight nginx"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs nginx"><span class="hljs-attribute">sub_filter</span> <span class="hljs-string">&#x27;https://xxx/ingress/&#x27;</span> <span class="hljs-string">&#x27;https://xxx/vue/&#x27;</span>;<br><span class="hljs-attribute">sub_filter_once</span> <span class="hljs-literal">off</span>;<br><span class="hljs-attribute">sub_filter_types</span> application/javascript;<br></code></pre></td></tr></table></figure><p><img src="https://img.limour.top/2024/03/25/6601762cad36c.webp" alt=""></p><h2 id="配置-Hexo">配置 Hexo</h2><ul><li><a href="/-ji-lu--zai-GitHub-shang-da-jian-Hexo">搭建 Hexo</a></li><li>编辑 <code>scripts/custom.js</code>, 内容如下</li></ul><figure class="highlight js"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><code class="hljs js"><span class="hljs-comment">// shynet 统计</span><br>hexo.<span class="hljs-property">extend</span>.<span class="hljs-property">injector</span>.<span class="hljs-title function_">register</span>(<span class="hljs-string">&#x27;head_begin&#x27;</span>, <span class="hljs-string">`</span><br><span class="hljs-string">&lt;script defer src=&quot;https://xxxx/vue/xxxx/script.js&quot;&gt;&lt;/script&gt;</span><br><span class="hljs-string">`</span>);<br></code></pre></td></tr></table></figure>]]></content:encoded>
  71. <category domain="https://hexo.limour.top/tags/hexo/">hexo</category>
  72. <comments>https://hexo.limour.top/Building-a-traffic-statistics-tool-Shynet#disqus_thread</comments>
  73. </item>
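TEMPLATE.env 里的 DJANGO_SECRET_KEY 随手生成一个足够长的随机串填入即可,例如(假设 .env 中已经存在 DJANGO_SECRET_KEY= 这一行,写法仅为示意):

    # 用 32 字节(64 个十六进制字符)的随机串替换 .env 中的 DJANGO_SECRET_KEY
    sed -i "s|^DJANGO_SECRET_KEY=.*|DJANGO_SECRET_KEY=$(openssl rand -hex 32)|" .env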
  74. <item>
  75. <title>【记录】Linux 设置个人热点</title>
  76. <link>https://hexo.limour.top/Linux-Setting-AP</link>
  77. <guid>https://hexo.limour.top/Linux-Setting-AP</guid>
  78. <pubDate>Wed, 20 Mar 2024 11:52:10 GMT</pubDate>
  79. <description>&lt;p&gt;实在受不了虚拟机的性能损失了,再加上 Win11 上跑虚拟机对 SSD 的损耗过大,因此还是将系统换成了 ubuntu,只要注意选无网络安装,不要去更新,基本还是很好换系统的。另外清华源不错!&lt;/p&gt;
  80. &lt;p&gt;换系统后,需要&lt;a href=&quot;/Win11-she-zhi-ka</description>
  81. <content:encoded><![CDATA[<p>实在受不了虚拟机的性能损失了,再加上 Win11 上跑虚拟机对 SSD 的损耗过大,因此还是将系统换成了 ubuntu,只要注意选无网络安装,不要去更新,基本还是很好换系统的。另外清华源不错!</p><p>换系统后,需要<a href="/Win11-she-zhi-kai-ji-qi-dong-yi-dong-re-dian">重新折腾一下 AP 设置</a>,因此记录一下折腾过程。</p><p>无线网卡是垃圾的 <code>mediatek mt7921e</code></p><h2 id="更新内核">更新内核</h2><p>因为网卡垃圾,不得不更新到最新的内核才支持 AP 设置</p><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><code class="hljs bash">proxychains wget https://raw.githubusercontent.com/pimlie/ubuntu-mainline-kernel.sh/master/ubuntu-mainline-kernel.sh<br><span class="hljs-built_in">chmod</span> +x ubuntu-mainline-kernel.sh<br>sudo gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv 17C622B0 <span class="hljs-comment"># 网络错误,需要绕过某个东西</span><br>sudo proxychains ./ubuntu-mainline-kernel.sh -i<br>sudo reboot<br><span class="hljs-built_in">uname</span> -r<br>sudo apt --fix-broken install<br></code></pre></td></tr></table></figure><h2 id="解决-53-端口占用">解决 53 端口占用</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs bash">sudo systemctl stop systemd-resolved<br>sudo nano /etc/systemd/resolved.conf<br></code></pre></td></tr></table></figure><figure class="highlight plaintext"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs conf">[Resolve]<br>DNS=8.8.8.8 #取消注释,增加dns<br>DNSStubListener=no #取消注释,把yes改为no<br></code></pre></td></tr></table></figure><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><code class="hljs bash">sudo <span class="hljs-built_in">ln</span> -sf /run/systemd/resolve/resolv.conf /etc/resolv.conf<br></code></pre></td></tr></table></figure><h2 id="安装-create-ap">安装 create_ap</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">cd</span> /dev/shm/<br>proxychains git <span class="hljs-built_in">clone</span> https://github.com/oblique/create_ap<br><span class="hljs-built_in">cd</span> create_ap<br>sudo make install<br>sudo apt-get install util-linux procps hostapd iproute2 iw haveged dnsmasq<br></code></pre></td></tr></table></figure><h2 id="测试-create-ap">测试 create_ap</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><code class="hljs bash">sudo create_ap wlp2s0 enp1s0 ser5 &lt;密码&gt; --country CN -c 157 --freq-band 5 --no-virt<br></code></pre></td></tr></table></figure><h2 id="启用-create-ap">启用 create_ap</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><code class="hljs bash">nano create_ap.service<br>sudo <span class="hljs-built_in">mv</span> create_ap.service /etc/systemd/system/create_ap.service<br>sudo systemctl 
<span class="hljs-built_in">enable</span> create_ap<br>sudo systemctl start create_ap<br></code></pre></td></tr></table></figure><figure class="highlight plaintext"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><code class="hljs conf">[Unit]<br>Description=create_ap<br>After=network.target docker.service<br>[Service]<br>ExecStart=/usr/bin/create_ap wlp2s0 enp1s0 ser5 &lt;密码&gt; --country CN -c 157 --freq-band 5 --no-virt<br>ExecReload=/bin/kill -HUP $MAINPID<br>Restart=on-failure<br>[Install]<br>WantedBy=multi-user.target<br></code></pre></td></tr></table></figure><h2 id="增加稳定性">增加稳定性</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs bash">sudo crontab -e<br><span class="hljs-comment"># 5 4 * * * /usr/bin/systemctl restart create_ap</span><br></code></pre></td></tr></table></figure><h2 id="踩坑花絮">踩坑花絮</h2><ul><li><code>lnxrouter</code> 虽然在 <code>create_ap</code> 上进行了更新,但是实际体验在所有信道上都报错,折腾了半天,放弃</li><li>搜到一些老旧的教程,自己去折腾 <code>hostapd</code>,然后自己去配置网桥的时候把服务器弄断网好几次,不得不到处找显示器和键盘</li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br></pre></td><td class="code"><pre><code class="hljs bash">sudo su<br><span class="hljs-built_in">cat</span> &lt;&lt; <span class="hljs-string">EOF &gt; /etc/hostapd/hostapd.conf</span><br><span class="hljs-string">interface=wlp2s0</span><br><span class="hljs-string">bridge=br-ap</span><br><span class="hljs-string">driver=nl80211</span><br><span class="hljs-string">ssid=ser5</span><br><span class="hljs-string">hw_mode=a</span><br><span class="hljs-string">channel=165</span><br><span class="hljs-string">country_code=CN</span><br><span class="hljs-string">macaddr_acl=0</span><br><span class="hljs-string">auth_algs=3</span><br><span class="hljs-string">wpa=2</span><br><span class="hljs-string">wpa_passphrase=&lt;密码&gt;</span><br><span class="hljs-string">wpa_key_mgmt=WPA-PSK</span><br><span class="hljs-string">wpa_pairwise=TKIP CCMP</span><br><span class="hljs-string">rsn_pairwise=TKIP CCMP</span><br><span class="hljs-string">EOF</span><br></code></pre></td></tr></table></figure><ul><li>收获教训:没事别碰 <code>/etc/netplan/00-installer-config.yaml</code>,特别是没显示器和键盘的时候</li><li>获取网卡型号和驱动型号,查看支持的信道</li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs bash">sudo ethtool -i wlp2s0<br>sudo lspci -nn | grep <span class="hljs-string">&quot;Network&quot;</span><br>iwlist wlp2s0 channel<br></code></pre></td></tr></table></figure><ul><li>另外新内核似乎不需要 
<code>haveged</code> 来增加熵了</li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">cat</span> /proc/sys/kernel/random/entropy_avail<br>systemctl status haveged <br>apt install haveged<br>systemctl <span class="hljs-built_in">enable</span> haveged<br>systemctl start haveged<br></code></pre></td></tr></table></figure>]]></content:encoded>
  82. <category domain="https://hexo.limour.top/tags/ubuntu/">ubuntu</category>
  83. <comments>https://hexo.limour.top/Linux-Setting-AP#disqus_thread</comments>
  84. </item>
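create_ap 服务启动后,可以用下面的命令确认热点状态和已连接的设备(wlp2s0 沿用上文的无线网卡名):

    systemctl status create_ap        # 服务是否在运行
    journalctl -u create_ap -n 50     # 查看最近的日志
    iw dev wlp2s0 info                # 确认网卡已处于 AP 模式
    iw dev wlp2s0 station dump        # 列出已连接的客户端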
  85. <item>
  86. <title>【探索】暴力计算临床研究的样本量</title>
  87. <link>https://hexo.limour.top/Sample-size-calculation-for-survival-analysis-in-clinical-research</link>
  88. <guid>https://hexo.limour.top/Sample-size-calculation-for-survival-analysis-in-clinical-research</guid>
  89. <pubDate>Tue, 12 Mar 2024 16:46:35 GMT</pubDate>
  90. <description>这篇博客介绍了如何计算临床研究中两组生存分析的样本量。首先,作者提供了R代码,包括Logrank对数秩检验的函数以及模拟计算样本量的函数。其次,作者详细解释了模拟计算的步骤,包括生成生存时间数据、招募时间、失访时间等,并通过模拟来估计研究的功效。最后,作者展示了如何使用模拟计算函数来确定样本量,以达到预先设定的功效水平。通过模拟检验,作者展示了样本量计算的有效性,并给出了两个示例,以验证样本量计算的准确性。</description>
  91. <content:encoded><![CDATA[<p>和《<a href="/shi-yong-Bootstrap-fa-ji-suan-zi-ju-zhi-xin-qu-jian">使用Bootstrap法计算自举置信区间</a>》的想法差不多,通过暴力枚举来计算临床研究的样本量,以两组生存分析为例。</p><h2 id="Logrank对数秩检验">Logrank对数秩检验</h2><figure class="highlight r"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br></pre></td><td class="code"><pre><code class="hljs R">require<span class="hljs-punctuation">(</span>survival<span class="hljs-punctuation">)</span><br>f_surv_logrank <span class="hljs-operator">=</span> <span class="hljs-keyword">function</span><span class="hljs-punctuation">(</span>df<span class="hljs-punctuation">)</span><span class="hljs-punctuation">&#123;</span><br> <span class="hljs-comment"># df 包含 group time status 三列</span><br> <span class="hljs-comment"># group 类型为 factor</span><br> <span class="hljs-comment"># status 0 表示未发生结局事件 1 表示发生结局事件</span><br> surv_obj <span class="hljs-operator">=</span> with<span class="hljs-punctuation">(</span>survival<span class="hljs-operator">::</span>Surv<span class="hljs-punctuation">(</span>time <span class="hljs-operator">=</span> time<span class="hljs-punctuation">,</span> event <span class="hljs-operator">=</span> status<span class="hljs-punctuation">)</span><span class="hljs-punctuation">,</span> data <span class="hljs-operator">=</span> df<span class="hljs-punctuation">)</span><br> surv_fit <span class="hljs-operator">=</span> survival<span class="hljs-operator">::</span>survfit<span class="hljs-punctuation">(</span>surv_obj <span class="hljs-operator">~</span> group<span class="hljs-punctuation">,</span> data <span class="hljs-operator">=</span> df<span class="hljs-punctuation">)</span><br> surv_diff <span class="hljs-operator">=</span> survival<span class="hljs-operator">::</span>survdiff<span class="hljs-punctuation">(</span>surv_obj <span class="hljs-operator">~</span> group<span class="hljs-punctuation">,</span> data <span class="hljs-operator">=</span> df<span class="hljs-punctuation">)</span><br> res <span class="hljs-operator">=</span> <span class="hljs-built_in">list</span><span class="hljs-punctuation">(</span>pv <span class="hljs-operator">=</span> <span class="hljs-number">1</span> <span class="hljs-operator">-</span> stats<span class="hljs-operator">::</span>pchisq<span class="hljs-punctuation">(</span>surv_diff<span class="hljs-operator">$</span>chisq<span class="hljs-punctuation">,</span> <span class="hljs-built_in">length</span><span class="hljs-punctuation">(</span>surv_diff<span class="hljs-operator">$</span>n<span class="hljs-punctuation">)</span> <span class="hljs-operator">-</span> <span class="hljs-number">1</span><span class="hljs-punctuation">)</span><span class="hljs-punctuation">,</span> <span class="hljs-comment"># p值</span><br> surv_fit <span class="hljs-operator">=</span> surv_fit<span class="hljs-punctuation">,</span> <span class="hljs-comment"># 绘图用</span><br> surv_obj <span class="hljs-operator">=</span> surv_obj<span class="hljs-punctuation">)</span> <span class="hljs-comment"># 
为了兼容惰性求值</span><br> <span class="hljs-built_in">return</span><span class="hljs-punctuation">(</span>res<span class="hljs-punctuation">)</span><br><span class="hljs-punctuation">&#125;</span><br>f_surv_logrank_plot <span class="hljs-operator">=</span> <span class="hljs-keyword">function</span><span class="hljs-punctuation">(</span>res<span class="hljs-punctuation">)</span><span class="hljs-punctuation">&#123;</span><br> require<span class="hljs-punctuation">(</span>survminer<span class="hljs-punctuation">)</span><br> surv_obj <span class="hljs-operator">&lt;&lt;-</span> res<span class="hljs-operator">$</span>surv_obj <span class="hljs-comment"># 为了兼容惰性求值</span><br> survminer<span class="hljs-operator">::</span>ggsurvplot<span class="hljs-punctuation">(</span>res<span class="hljs-operator">$</span>surv_fit<span class="hljs-punctuation">,</span> conf.int <span class="hljs-operator">=</span> <span class="hljs-built_in">F</span><span class="hljs-punctuation">,</span> pval <span class="hljs-operator">=</span> <span class="hljs-built_in">T</span><span class="hljs-punctuation">,</span> risk.table <span class="hljs-operator">=</span> <span class="hljs-built_in">T</span><span class="hljs-punctuation">,</span> ncensor.plot <span class="hljs-operator">=</span> <span class="hljs-literal">TRUE</span><span class="hljs-punctuation">)</span><br><span class="hljs-punctuation">&#125;</span><br></code></pre></td></tr></table></figure><h2 id="模拟计算">模拟计算</h2><figure class="highlight r"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br></pre></td><td class="code"><pre><code class="hljs R">f_surv_logrank_simulation_Group <span class="hljs-operator">=</span> <span class="hljs-keyword">function</span><span class="hljs-punctuation">(</span>N<span class="hljs-punctuation">,</span> 
Median_Survival_Time<span class="hljs-punctuation">,</span> Lost<span class="hljs-punctuation">,</span> Duration_Accrual_Time<span class="hljs-punctuation">,</span> Duration_Total_Time<span class="hljs-punctuation">)</span><span class="hljs-punctuation">&#123;</span><br> time <span class="hljs-operator">=</span> stats<span class="hljs-operator">::</span>rexp<span class="hljs-punctuation">(</span>N<span class="hljs-punctuation">,</span> rate <span class="hljs-operator">=</span> <span class="hljs-built_in">log</span><span class="hljs-punctuation">(</span><span class="hljs-number">2</span><span class="hljs-punctuation">)</span> <span class="hljs-operator">/</span> Median_Survival_Time<span class="hljs-punctuation">)</span> <span class="hljs-comment"># 生存时间服从指数分布</span><br> status <span class="hljs-operator">=</span> <span class="hljs-built_in">rep</span><span class="hljs-punctuation">(</span><span class="hljs-number">1</span><span class="hljs-punctuation">,</span>N<span class="hljs-punctuation">)</span> <span class="hljs-comment"># 到生存时间发生结局事件</span><br> <span class="hljs-comment"># print(median((survfit(Surv(time, status) ~ 1))))</span><br> EnrollT <span class="hljs-operator">=</span> stats<span class="hljs-operator">::</span>runif<span class="hljs-punctuation">(</span>N<span class="hljs-punctuation">,</span> <span class="hljs-built_in">min</span> <span class="hljs-operator">=</span> <span class="hljs-number">0</span><span class="hljs-punctuation">,</span> <span class="hljs-built_in">max</span> <span class="hljs-operator">=</span> Duration_Accrual_Time<span class="hljs-punctuation">)</span> <span class="hljs-comment"># 招募时间服从均匀分布</span><br> calender_time <span class="hljs-operator">=</span> time <span class="hljs-operator">+</span> EnrollT <span class="hljs-comment"># 发生结局的日期</span><br> idx <span class="hljs-operator">=</span> calender_time <span class="hljs-operator">&gt;</span> Duration_Total_Time <span class="hljs-comment"># 研究终止时未发生结局事件</span><br> status<span class="hljs-punctuation">[</span>idx<span class="hljs-punctuation">]</span> <span class="hljs-operator">=</span> <span class="hljs-number">0</span><br> time<span class="hljs-punctuation">[</span>idx<span class="hljs-punctuation">]</span> <span class="hljs-operator">=</span> Duration_Total_Time <span class="hljs-operator">-</span> EnrollT<span class="hljs-punctuation">[</span>idx<span class="hljs-punctuation">]</span> <span class="hljs-comment"># 实际参与试验的时间</span><br> <span class="hljs-comment"># print(median((survfit(Surv(time, status) ~ 1)))) # 如果 Accrual_Time + Median_Survival &lt; Total_Time,结果不变</span><br> loss <span class="hljs-operator">=</span> stats<span class="hljs-operator">::</span>rexp<span class="hljs-punctuation">(</span>N<span class="hljs-punctuation">,</span> rate <span class="hljs-operator">=</span> Lost<span class="hljs-punctuation">)</span> <span class="hljs-comment"># 失访时间服从指数分布</span><br> idx <span class="hljs-operator">=</span> loss <span class="hljs-operator">&lt;</span> time <span class="hljs-comment"># 失访的人</span><br> status<span class="hljs-punctuation">[</span>idx<span class="hljs-punctuation">]</span> <span class="hljs-operator">=</span> <span class="hljs-number">0</span><br> time<span class="hljs-punctuation">[</span>idx<span class="hljs-punctuation">]</span> <span class="hljs-operator">=</span> loss<span class="hljs-punctuation">[</span>idx<span class="hljs-punctuation">]</span><br> <span class="hljs-comment"># print(median((survfit(Surv(time, status) ~ 1)))) # 结果改变</span><br> <span 
class="hljs-built_in">return</span><span class="hljs-punctuation">(</span><span class="hljs-built_in">list</span><span class="hljs-punctuation">(</span>time <span class="hljs-operator">=</span> time<span class="hljs-punctuation">,</span> status <span class="hljs-operator">=</span> status<span class="hljs-punctuation">)</span><span class="hljs-punctuation">)</span><br><span class="hljs-punctuation">&#125;</span><br>f_surv_logrank_simulation_Power <span class="hljs-operator">=</span> <span class="hljs-keyword">function</span><span class="hljs-punctuation">(</span>n_C<span class="hljs-punctuation">,</span> Median_Survival_Time_C<span class="hljs-punctuation">,</span> Lost_C<span class="hljs-punctuation">,</span> <br> n_T<span class="hljs-punctuation">,</span> Median_Survival_Time_T<span class="hljs-punctuation">,</span> Lost_T<span class="hljs-punctuation">,</span> <br> Duration_Accrual_Time<span class="hljs-punctuation">,</span> Duration_Total_Time<span class="hljs-punctuation">,</span> Simulation_Cycle<span class="hljs-punctuation">,</span> Alpha<span class="hljs-punctuation">)</span><span class="hljs-punctuation">&#123;</span><br> df <span class="hljs-operator">=</span> data.frame<span class="hljs-punctuation">(</span>group <span class="hljs-operator">=</span> factor<span class="hljs-punctuation">(</span><span class="hljs-built_in">c</span><span class="hljs-punctuation">(</span><span class="hljs-built_in">rep</span><span class="hljs-punctuation">(</span><span class="hljs-string">&#x27;Control&#x27;</span><span class="hljs-punctuation">,</span>n_C<span class="hljs-punctuation">)</span><span class="hljs-punctuation">,</span> <span class="hljs-built_in">rep</span><span class="hljs-punctuation">(</span><span class="hljs-string">&#x27;Treatment&#x27;</span><span class="hljs-punctuation">,</span>n_T<span class="hljs-punctuation">)</span><span class="hljs-punctuation">)</span><span class="hljs-punctuation">)</span><span class="hljs-punctuation">,</span> <br> time <span class="hljs-operator">=</span> <span class="hljs-built_in">rep</span><span class="hljs-punctuation">(</span><span class="hljs-number">0</span><span class="hljs-punctuation">,</span>n_C<span class="hljs-operator">+</span>n_T<span class="hljs-punctuation">)</span><span class="hljs-punctuation">,</span> <br> status <span class="hljs-operator">=</span> <span class="hljs-built_in">rep</span><span class="hljs-punctuation">(</span><span class="hljs-number">0</span><span class="hljs-punctuation">,</span>n_C<span class="hljs-operator">+</span>n_T<span class="hljs-punctuation">)</span><span class="hljs-punctuation">)</span><br> <span class="hljs-built_in">sum</span> <span class="hljs-operator">=</span> <span class="hljs-number">0</span><br> <span class="hljs-keyword">for</span> <span class="hljs-punctuation">(</span>i <span class="hljs-keyword">in</span> <span class="hljs-number">1</span><span class="hljs-operator">:</span>Simulation_Cycle<span class="hljs-punctuation">)</span> <span class="hljs-punctuation">&#123;</span><br> C <span class="hljs-operator">=</span> f_surv_logrank_simulation_Group<span class="hljs-punctuation">(</span>n_C<span class="hljs-punctuation">,</span> Median_Survival_Time_C<span class="hljs-punctuation">,</span> Lost_C<span class="hljs-punctuation">,</span> Duration_Accrual_Time<span class="hljs-punctuation">,</span> Duration_Total_Time<span class="hljs-punctuation">)</span><br> <span class="hljs-built_in">T</span> <span class="hljs-operator">=</span> f_surv_logrank_simulation_Group<span 
class="hljs-punctuation">(</span>n_T<span class="hljs-punctuation">,</span> Median_Survival_Time_T<span class="hljs-punctuation">,</span> Lost_T<span class="hljs-punctuation">,</span> Duration_Accrual_Time<span class="hljs-punctuation">,</span> Duration_Total_Time<span class="hljs-punctuation">)</span><br> df<span class="hljs-operator">$</span>time <span class="hljs-operator">=</span> <span class="hljs-built_in">c</span><span class="hljs-punctuation">(</span>C<span class="hljs-operator">$</span>time<span class="hljs-punctuation">,</span> <span class="hljs-built_in">T</span><span class="hljs-operator">$</span>time<span class="hljs-punctuation">)</span><br> df<span class="hljs-operator">$</span>status <span class="hljs-operator">=</span> <span class="hljs-built_in">c</span><span class="hljs-punctuation">(</span>C<span class="hljs-operator">$</span>status<span class="hljs-punctuation">,</span> <span class="hljs-built_in">T</span><span class="hljs-operator">$</span>status<span class="hljs-punctuation">)</span><br> <span class="hljs-keyword">if</span><span class="hljs-punctuation">(</span>f_surv_logrank<span class="hljs-punctuation">(</span>df<span class="hljs-punctuation">)</span><span class="hljs-operator">$</span>pv <span class="hljs-operator">&lt;</span> Alpha<span class="hljs-punctuation">)</span><span class="hljs-punctuation">&#123;</span><br> <span class="hljs-built_in">sum</span> <span class="hljs-operator">=</span> <span class="hljs-built_in">sum</span> <span class="hljs-operator">+</span> <span class="hljs-number">1</span><br> <span class="hljs-punctuation">&#125;</span><br> <span class="hljs-punctuation">&#125;</span><br> <span class="hljs-built_in">return</span><span class="hljs-punctuation">(</span><span class="hljs-built_in">sum</span><span class="hljs-operator">/</span>Simulation_Cycle<span class="hljs-punctuation">)</span><br><span class="hljs-punctuation">&#125;</span><br>f_surv_logrank_simulation_Sample_Size <span class="hljs-operator">=</span> <span class="hljs-keyword">function</span><span class="hljs-punctuation">(</span>n_C_min<span class="hljs-punctuation">,</span> n_C_max<span class="hljs-punctuation">,</span> Median_Survival_Time_C<span class="hljs-punctuation">,</span> Lost_C<span class="hljs-punctuation">,</span> <br> TvsC<span class="hljs-punctuation">,</span> Median_Survival_Time_T<span class="hljs-punctuation">,</span> Lost_T<span class="hljs-punctuation">,</span> <br> Duration_Accrual_Time<span class="hljs-punctuation">,</span> Duration_Total_Time<span class="hljs-punctuation">,</span><br> Simulation_Cycle<span class="hljs-punctuation">,</span> Alpha<span class="hljs-punctuation">,</span> Power<span class="hljs-punctuation">,</span> err<span class="hljs-operator">=</span><span class="hljs-number">0.01</span><span class="hljs-punctuation">)</span><span class="hljs-punctuation">&#123;</span><br> mid <span class="hljs-operator">=</span> <span class="hljs-built_in">floor</span><span class="hljs-punctuation">(</span><span class="hljs-punctuation">(</span>n_C_min <span class="hljs-operator">+</span> n_C_max<span class="hljs-punctuation">)</span> <span class="hljs-operator">/</span> <span class="hljs-number">2</span><span class="hljs-punctuation">)</span> <span class="hljs-comment"># 以防没有进入循环</span><br> <span class="hljs-keyword">while</span> <span class="hljs-punctuation">(</span>n_C_min <span class="hljs-operator">&lt;</span> n_C_max<span class="hljs-punctuation">)</span> <span class="hljs-punctuation">&#123;</span><br> mid <span class="hljs-operator">=</span> <span 
class="hljs-built_in">floor</span><span class="hljs-punctuation">(</span><span class="hljs-punctuation">(</span>n_C_min <span class="hljs-operator">+</span> n_C_max<span class="hljs-punctuation">)</span> <span class="hljs-operator">/</span> <span class="hljs-number">2</span><span class="hljs-punctuation">)</span><br> simulation_Power <span class="hljs-operator">=</span> f_surv_logrank_simulation_Power<span class="hljs-punctuation">(</span>mid<span class="hljs-punctuation">,</span> Median_Survival_Time_C<span class="hljs-punctuation">,</span> Lost_C<span class="hljs-punctuation">,</span> <br> <span class="hljs-built_in">as.integer</span><span class="hljs-punctuation">(</span>mid <span class="hljs-operator">*</span> TvsC<span class="hljs-punctuation">)</span><span class="hljs-punctuation">,</span> Median_Survival_Time_T<span class="hljs-punctuation">,</span> Lost_T<span class="hljs-punctuation">,</span> <br> Duration_Accrual_Time<span class="hljs-punctuation">,</span> Duration_Total_Time<span class="hljs-punctuation">,</span> Simulation_Cycle<span class="hljs-punctuation">,</span> Alpha<span class="hljs-punctuation">)</span><br> print<span class="hljs-punctuation">(</span>paste<span class="hljs-punctuation">(</span><span class="hljs-string">&quot;mid:&quot;</span><span class="hljs-punctuation">,</span> mid<span class="hljs-punctuation">,</span> <span class="hljs-string">&quot;simulation_Power:&quot;</span><span class="hljs-punctuation">,</span> simulation_Power<span class="hljs-punctuation">)</span><span class="hljs-punctuation">)</span><br> <span class="hljs-keyword">if</span> <span class="hljs-punctuation">(</span><span class="hljs-built_in">abs</span><span class="hljs-punctuation">(</span>simulation_Power <span class="hljs-operator">-</span> Power<span class="hljs-punctuation">)</span> <span class="hljs-operator">&lt;</span> err<span class="hljs-punctuation">)</span> <span class="hljs-punctuation">&#123;</span><br> <span class="hljs-built_in">return</span><span class="hljs-punctuation">(</span>mid<span class="hljs-punctuation">)</span><br> <span class="hljs-punctuation">&#125;</span><span class="hljs-keyword">else</span> <span class="hljs-keyword">if</span><span class="hljs-punctuation">(</span>simulation_Power <span class="hljs-operator">&lt;</span> Power<span class="hljs-punctuation">)</span> <span class="hljs-punctuation">&#123;</span><br> n_C_min <span class="hljs-operator">=</span> mid <span class="hljs-operator">+</span> <span class="hljs-number">1</span><br> <span class="hljs-punctuation">&#125;</span><span class="hljs-keyword">else</span> <span class="hljs-punctuation">&#123;</span><br> n_C_max <span class="hljs-operator">=</span> mid <span class="hljs-operator">-</span> <span class="hljs-number">1</span><br> <span class="hljs-punctuation">&#125;</span><br> <span class="hljs-punctuation">&#125;</span><br> <span class="hljs-built_in">return</span><span class="hljs-punctuation">(</span>mid<span class="hljs-punctuation">)</span><br><span class="hljs-punctuation">&#125;</span><br></code></pre></td></tr></table></figure><h2 id="参数说明">参数说明</h2><figure class="highlight r"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><code class="hljs R">Power <span 
class="hljs-operator">=</span> <span class="hljs-number">0.9</span> <span class="hljs-comment"># 检验效能 = 1 - 第二类错误的概率</span><br>Alpha <span class="hljs-operator">=</span> <span class="hljs-number">0.05</span> <span class="hljs-comment"># 第一类错误的概率</span><br>Median_Survival_Time_C <span class="hljs-operator">=</span> <span class="hljs-number">6</span> <span class="hljs-comment"># 对照组的中位生存时间</span><br>Median_Survival_Time_T <span class="hljs-operator">=</span> <span class="hljs-number">8</span> <span class="hljs-comment"># 试验组的中位生存时间</span><br>Duration_Accrual_Time <span class="hljs-operator">=</span> <span class="hljs-number">8</span> <span class="hljs-comment"># 入组完成用时</span><br>Duration_Total_Time <span class="hljs-operator">=</span> <span class="hljs-number">18</span> <span class="hljs-comment"># 总试验用时</span><br>Lost_C <span class="hljs-operator">=</span> <span class="hljs-number">0.05</span> <span class="hljs-comment"># 对照组随访单位时间后发生失访的概率</span><br>Lost_T <span class="hljs-operator">=</span> <span class="hljs-number">0.05</span> <span class="hljs-comment"># 试验组随访单位时间后发生失访的概率</span><br>TvsC <span class="hljs-operator">=</span> <span class="hljs-number">1</span> <span class="hljs-comment"># 试验组的样本量:对照组的样本量 1:1 = 1</span><br>Simulation_Cycle <span class="hljs-operator">=</span> <span class="hljs-number">100</span> <span class="hljs-comment"># 模拟的循环次数,越大越准确</span><br></code></pre></td></tr></table></figure><h2 id="检查效果">检查效果</h2><figure class="highlight r"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><code class="hljs R">f_surv_logrank_simulation_Power<span class="hljs-punctuation">(</span><span class="hljs-number">441</span><span class="hljs-punctuation">,</span> <span class="hljs-number">6</span><span class="hljs-punctuation">,</span> <span class="hljs-number">0.05</span><span class="hljs-punctuation">,</span> <br> <span class="hljs-number">442</span><span class="hljs-punctuation">,</span> <span class="hljs-number">8</span><span class="hljs-punctuation">,</span> <span class="hljs-number">0.05</span><span class="hljs-punctuation">,</span><br> <span class="hljs-number">8</span><span class="hljs-punctuation">,</span> <span class="hljs-number">18</span><span class="hljs-punctuation">,</span> <span class="hljs-number">1000</span><span class="hljs-punctuation">,</span> <span class="hljs-number">0.05</span><br><span class="hljs-punctuation">)</span><br><span class="hljs-comment"># PASS的结果是 0.9</span><br>f_surv_logrank_simulation_Sample_Size<span class="hljs-punctuation">(</span><span class="hljs-number">0</span><span class="hljs-punctuation">,</span> <span class="hljs-number">1000</span><span class="hljs-punctuation">,</span> <span class="hljs-number">6</span><span class="hljs-punctuation">,</span> <span class="hljs-number">0.05</span><span class="hljs-punctuation">,</span> <br> <span class="hljs-number">1</span><span class="hljs-punctuation">,</span> <span class="hljs-number">8</span><span class="hljs-punctuation">,</span> <span class="hljs-number">0.05</span><span class="hljs-punctuation">,</span><br> <span class="hljs-number">8</span><span class="hljs-punctuation">,</span> <span class="hljs-number">18</span><span class="hljs-punctuation">,</span> <span 
class="hljs-number">1000</span><span class="hljs-punctuation">,</span> <span class="hljs-number">0.05</span><span class="hljs-punctuation">,</span> <span class="hljs-number">0.9</span><br><span class="hljs-punctuation">)</span><br><span class="hljs-comment"># PASS的结果是 441</span><br></code></pre></td></tr></table></figure>]]></content:encoded>
  92. <category domain="https://hexo.limour.top/tags/bootstrap/">Bootstrap</category>
  93. <comments>https://hexo.limour.top/Sample-size-calculation-for-survival-analysis-in-clinical-research#disqus_thread</comments>
  94. </item>
  95. <item>
  96. <title>【探索】6G显存畅玩无限长度的LLM角色扮演</title>
  97. <link>https://hexo.limour.top/Enjoy-unlimited-length-LLM-role-playing-with-6GB-of-VRAM</link>
  98. <guid>https://hexo.limour.top/Enjoy-unlimited-length-LLM-role-playing-with-6GB-of-VRAM</guid>
  99. <pubDate>Sat, 10 Feb 2024 01:02:10 GMT</pubDate>
  100. <description>&lt;p&gt;角色扮演的体验是否舒适主要受角色卡、大模型和生成时间三个因素的影响。&lt;/p&gt;
  101. &lt;p&gt;优秀的角色卡往往附带大量的设定,这会极大的拖慢第一次生成的时间,并且随着对话的进行,上下文长度很容易超过kv_cache的上限,这些很破坏沉浸式的体验。&lt;/p&gt;
  102. &lt;p&gt;此外,大模型在进行角色</description>
  103. <content:encoded><![CDATA[<p>角色扮演的体验是否舒适主要受角色卡、大模型和生成时间三个因素的影响。</p><p>优秀的角色卡往往附带大量的设定,这会极大的拖慢第一次生成的时间,并且随着对话的进行,上下文长度很容易超过kv_cache的上限,这些很破坏沉浸式的体验。</p><p>此外,大模型在进行角色扮演时,除了进行必要的对话生成外,还需要生成旁白增加想象空间。</p><p>对博主这些相比填空更喜欢选项的玩家,给出提问建议也是非常必要的:在建议的基础上修改比自己从零写一个情景更简单,同时也完整保留了控制剧情走向的权力。</p><p>以上这些都让本就稀缺的kv_cache更加雪上加霜。</p><p>万幸,StreamingLLM 发现了kv_cache具有良好的平移性,而 llama.cpp 也提供了对kv_cache进行底层操作的api:可以指定范围的 kv_cache_seq_rm 和 kv_cache_seq_shift。基于这两个api,我们将实现对kv_cache的 token 级微操,榨干kv_cache的全部价值。</p><p>博主实践表明,在充分利用kv_cache的基础上,哪怕是 huggingface space 免费的2vCPU容器也可以游玩角色扮演,而笔记本端6G显存的1660Ti可以做到畅玩角色扮演。</p><h2 id="体验-DEMO">体验 DEMO</h2><ul><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9zcGFjZXMvTGltb3VyL2xsYW1hLXB5dGhvbi1zdHJlYW1pbmdsbG0=" rel="noopener external nofollow noreferrer">Limour/llama-python-streamingllm</a></li><li>同一时间仅支持一个人用,用之前点 Reset 按钮恢复初始的 kv_cache</li><li>按 Submit 没反应,说明有人在用,等一段时间后再 Reset</li><li>最好是 Duplicate 后,设为私密来使用</li></ul><h2 id="代码仓库">代码仓库</h2><ul><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL0xpbW91ci1kZXYvbGxhbWEtcHl0aG9uLXN0cmVhbWluZ2xsbQ==" rel="noopener external nofollow noreferrer">llama-python-streamingllm</a></li><li><a href="/-ji-lu--an-zhuang-conda-bing-geng-huan-qing-hua-yuan">安装conda</a></li><li><a href="/Use-Tunnel-to-speed-up-the-connection-of-VPS">学术上网</a>(管理员权限)</li><li>使用前需要修改 <code>rp_config.json</code> 里的模型路径和参数,指定为你已经下载了的<code>GGUF</code>格式模型的路径</li><li>推荐 <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9UaGVCbG9rZS9DYXVzYWxMTS03Qi1HR1VGL2Jsb2IvbWFpbi9jYXVzYWxsbV83Yi5RNV9LX00uZ2d1Zg==" rel="noopener external nofollow noreferrer">causallm_7b.Q5_K_M.gguf</a></li><li>或者自己用 <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9kYXRhc2V0cy9MaW1vdXIvYi1jb3JwdXM=" rel="noopener external nofollow noreferrer">Galgame</a> 解包的对话数据集微调一个合适的模型。</li></ul><h3 id="二选一:GPU版本的环境">二选一:GPU版本的环境</h3><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><code class="hljs powershell">conda create <span class="hljs-literal">-n</span> llamaCpp libcublas cuda<span class="hljs-literal">-toolkit</span> git <span class="hljs-literal">-c</span> nvidia <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br>conda activate llamaCpp<br>conda install python=<span class="hljs-number">3.10</span> gradio <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br><span class="hljs-comment"># 然后去 release 下载相应的包 https://github.com/Limour-dev/llama-cpp-python-cuBLAS-wheels/releases</span><br>pip install <span class="hljs-literal">--force-reinstall</span> llama_cpp_python<span class="hljs-literal">-0</span>.<span class="hljs-number">2.39</span>+cu122<span class="hljs-literal">-cp310-cp310-win_amd64</span>.whl<br></code></pre></td></tr></table></figure><h3 id="二选一:CPU版本的环境">二选一:CPU版本的环境</h3><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs powershell">conda create <span class="hljs-literal">-n</span> llamaCpp python=<span class="hljs-number">3.10</span> gradio git <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br>conda activate llamaCpp<br>pip install llama<span 
class="hljs-literal">-cpp-python</span>==<span class="hljs-number">0.2</span>.<span class="hljs-number">39</span><br></code></pre></td></tr></table></figure><h3 id="下载并运行">下载并运行</h3><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><code class="hljs powershell">conda activate llamaCpp<br>git clone <span class="hljs-literal">--depth</span>=<span class="hljs-number">1</span> https://github.com/Limour<span class="hljs-literal">-dev</span>/llama<span class="hljs-literal">-python-streamingllm</span>.git<br><span class="hljs-built_in">cd</span> llama<span class="hljs-literal">-python-streamingllm</span><br>mkdir cache<br>python .\gradio_streamingllm.py<br></code></pre></td></tr></table></figure><h2 id="核心内容">核心内容</h2><ul><li><code>Submit</code> 会将 msg 发送给模型,然后流式生成回答</li><li><code>Retry</code> 会重新生成最近一次的 msg 所对应的回答</li><li><code>旁白</code> 会流式生成一份旁白到 <code>VO</code> 框</li><li><code>建议</code> 会以 usr 的身份流式生成一份 msg 供修改</li><li>上面四个功能的基础就是下面的基于 StreamingLLM 原理的 venv 开头的函数</li></ul><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br><span class="line">57</span><br><span class="line">58</span><br><span class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br><span class="line">67</span><br><span class="line">68</span><br><span class="line">69</span><br><span class="line">70</span><br><span class="line">71</span><br><span 
class="line">72</span><br><span class="line">73</span><br><span class="line">74</span><br><span class="line">75</span><br><span class="line">76</span><br><span class="line">77</span><br><span class="line">78</span><br><span class="line">79</span><br><span class="line">80</span><br><span class="line">81</span><br><span class="line">82</span><br><span class="line">83</span><br><span class="line">84</span><br><span class="line">85</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">class</span> <span class="hljs-title class_">StreamingLLM</span>(<span class="hljs-title class_ inherited__">Llama</span>):<br> <span class="hljs-keyword">pass</span><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">kv_cache_seq_trim</span>(<span class="hljs-params">self</span>):<br> self._ctx.kv_cache_seq_rm(-<span class="hljs-number">1</span>, self.n_tokens, -<span class="hljs-number">1</span>)<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">kv_cache_seq_ltrim</span>(<span class="hljs-params">self, n_keep, n_discard=<span class="hljs-number">256</span>, n_past=-<span class="hljs-number">1</span></span>):<br> <span class="hljs-keyword">if</span> n_past &lt; <span class="hljs-number">0</span>:<br> n_past = self.n_tokens<br> self._ctx.kv_cache_seq_rm(-<span class="hljs-number">1</span>, n_keep, n_keep + n_discard)<br> self._ctx.kv_cache_seq_shift(<span class="hljs-number">0</span>, n_keep + n_discard, n_past, -n_discard)<br> self.input_ids[n_keep:n_past - n_discard] = self.input_ids[n_keep + n_discard:n_past]<br> self.n_tokens = n_past - n_discard<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">_venv_init</span>(<span class="hljs-params">self</span>):<br> self.venv = [<span class="hljs-number">0</span>]<br> self.venv_idx_map = []<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">venv_create</span>(<span class="hljs-params">self, name: <span class="hljs-built_in">str</span></span>):<br> self.venv.append(<span class="hljs-number">0</span>)<br> self.venv_idx_map.append(name)<br> <span class="hljs-keyword">return</span> name<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">venv_disband</span>(<span class="hljs-params">self, name_set</span>):<br> <span class="hljs-keyword">if</span> <span class="hljs-built_in">len</span>(self.venv) &lt;= <span class="hljs-number">1</span>:<br> <span class="hljs-keyword">return</span> <span class="hljs-literal">False</span><br> name_set = &#123;x <span class="hljs-keyword">for</span> x <span class="hljs-keyword">in</span> name_set <span class="hljs-keyword">if</span> x <span class="hljs-keyword">in</span> self.venv_idx_map&#125;<br> <span class="hljs-keyword">if</span> <span class="hljs-keyword">not</span> name_set:<br> <span class="hljs-keyword">return</span> <span class="hljs-literal">False</span><br> <span class="hljs-keyword">while</span> self.venv_idx_map:<br> <span class="hljs-keyword">if</span> self.venv_idx_map[<span class="hljs-number">0</span>] <span class="hljs-keyword">in</span> name_set:<br> self.venv_idx_map.pop(<span class="hljs-number">0</span>) <span class="hljs-comment"># 删除</span><br> tmp = self.venv.pop(<span class="hljs-number">1</span>) <span class="hljs-comment"># 对应的 venv 移入上一层</span><br> self.venv[<span class="hljs-number">0</span>] += tmp<br> <span class="hljs-keyword">else</span>:<br> <span class="hljs-keyword">break</span><br> <span 
class="hljs-keyword">return</span> <span class="hljs-literal">True</span><br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">venv_revision</span>(<span class="hljs-params">self, name: <span class="hljs-built_in">str</span></span>):<br> <span class="hljs-keyword">if</span> <span class="hljs-built_in">len</span>(self.venv) &lt;= <span class="hljs-number">1</span>:<br> <span class="hljs-keyword">return</span> <span class="hljs-literal">False</span><br> <span class="hljs-keyword">if</span> name <span class="hljs-keyword">not</span> <span class="hljs-keyword">in</span> self.venv_idx_map:<br> <span class="hljs-keyword">return</span> <span class="hljs-literal">False</span><br> _s = <span class="hljs-number">0</span><br> <span class="hljs-keyword">while</span> self.venv_idx_map:<br> <span class="hljs-keyword">if</span> self.venv_idx_map[-<span class="hljs-number">1</span>] == name:<br> <span class="hljs-keyword">break</span><br> self.venv_idx_map.pop() <span class="hljs-comment"># 删除</span><br> _s += self.venv.pop()<br> <span class="hljs-keyword">if</span> _s:<br> self.n_tokens -= <span class="hljs-built_in">min</span>(_s, self.n_tokens)<br> self.kv_cache_seq_trim()<br> <span class="hljs-keyword">return</span> <span class="hljs-literal">True</span><br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">venv_remove</span>(<span class="hljs-params">self, name: <span class="hljs-built_in">str</span></span>):<br> <span class="hljs-keyword">if</span> <span class="hljs-built_in">len</span>(self.venv) &lt;= <span class="hljs-number">1</span>:<br> <span class="hljs-keyword">return</span> <span class="hljs-literal">False</span><br> <span class="hljs-keyword">if</span> name <span class="hljs-keyword">not</span> <span class="hljs-keyword">in</span> self.venv_idx_map:<br> <span class="hljs-keyword">return</span> <span class="hljs-literal">False</span><br> venv_idx = self.venv_idx_map.index(name) + <span class="hljs-number">1</span><br> <span class="hljs-keyword">while</span> self.venv_idx_map:<br> self.venv_idx_map.pop(venv_idx - <span class="hljs-number">1</span>) <span class="hljs-comment"># 删除</span><br> <span class="hljs-keyword">if</span> venv_idx == <span class="hljs-built_in">len</span>(self.venv) - <span class="hljs-number">1</span>:<br> <span class="hljs-comment"># 最后一层</span><br> self.n_tokens -= <span class="hljs-built_in">min</span>(self.venv.pop(), self.n_tokens)<br> self.kv_cache_seq_trim()<br> <span class="hljs-keyword">break</span><br> <span class="hljs-keyword">else</span>:<br> <span class="hljs-comment"># 非最后一层</span><br> n_keep = self.n_tokens - <span class="hljs-built_in">sum</span>(self.venv[i] <span class="hljs-keyword">for</span> i <span class="hljs-keyword">in</span> <span class="hljs-built_in">range</span>(venv_idx, <span class="hljs-built_in">len</span>(self.venv)))<br> n_discard = self.venv.pop(venv_idx)<br> self.kv_cache_seq_ltrim(n_keep, n_discard)<br> <span class="hljs-keyword">try</span>:<br> venv_idx = self.venv_idx_map.index(name, venv_idx - <span class="hljs-number">1</span>) + <span class="hljs-number">1</span><br> <span class="hljs-keyword">except</span> ValueError: <span class="hljs-comment"># 没有了</span><br> <span class="hljs-keyword">break</span><br> <span class="hljs-keyword">return</span> <span class="hljs-literal">True</span><br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">eval_t</span>(<span class="hljs-params">self, tokens, n_keep=<span class="hljs-number">4</span>, 
n_discard=<span class="hljs-number">256</span>, im_start=<span class="hljs-literal">None</span></span>):<br> <span class="hljs-keyword">if</span> self._n_ctx &lt; self.n_tokens + <span class="hljs-built_in">len</span>(tokens):<br> tmp_n_discard = <span class="hljs-built_in">max</span>(n_discard, self.n_tokens + <span class="hljs-built_in">len</span>(tokens) - self._n_ctx)<br> self.kv_cache_seq_ltrim(n_keep, tmp_n_discard)<br> <span class="hljs-keyword">for</span> i <span class="hljs-keyword">in</span> <span class="hljs-built_in">range</span>(<span class="hljs-number">0</span>, <span class="hljs-built_in">len</span>(tokens), self.n_batch):<br> <span class="hljs-keyword">pass</span><br> self.n_tokens += n_tokens<br> self.venv[-<span class="hljs-number">1</span>] += n_tokens<br></code></pre></td></tr></table></figure>]]></content:encoded>
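<!--
The token-level cache management described above boils down to two primitives: kv_cache_seq_rm
drops a range of positions and kv_cache_seq_shift slides the remaining tail left. As a toy
illustration only (a plain Python list standing in for the per-position KV entries, not the
llama-cpp-python API), kv_cache_seq_ltrim is equivalent to:

def ltrim(cache, n_keep, n_discard):
    # keep the first n_keep "anchor" tokens, evict the next n_discard,
    # and let the tail slide left into the freed positions (StreamingLLM-style)
    return cache[:n_keep] + cache[n_keep + n_discard:]

cache = list(range(10))                      # positions 0..9
print(ltrim(cache, n_keep=4, n_discard=3))   # [0, 1, 2, 3, 7, 8, 9]

The real method additionally updates input_ids and n_tokens so that the Python-side bookkeeping
stays in sync with what is left in the cache.
-->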
  104. <category domain="https://hexo.limour.top/tags/%E6%8E%A2%E7%B4%A2/">探索</category>
  105. <category domain="https://hexo.limour.top/tags/llama/">llama</category>
  106. <comments>https://hexo.limour.top/Enjoy-unlimited-length-LLM-role-playing-with-6GB-of-VRAM#disqus_thread</comments>
  107. </item>
  108. <item>
  109. <title>【探索】将BlueLM-7B-Chat转换为标准的GGUF模型</title>
  110. <link>https://hexo.limour.top/Convert-BlueLM-7B-Chat-to-the-standard-GGUF-model</link>
  111. <guid>https://hexo.limour.top/Convert-BlueLM-7B-Chat-to-the-standard-GGUF-model</guid>
  112. <pubDate>Sat, 03 Feb 2024 22:38:07 GMT</pubDate>
  113. <description>&lt;h2 id=&quot;准备模型&quot;&gt;准备模型&lt;/h2&gt;
  114. &lt;ul&gt;
  115. &lt;li&gt;&lt;a href=&quot;/Running-Qwen-on-the-Win10-platform-with-6GB-of-video-memory&quot;&gt;运行环境&lt;/a&gt;&lt;/li&gt;
  116. &lt;/ul&gt;
  117. &lt;figure class=&quot;h</description>
  118. <content:encoded><![CDATA[<h2 id="准备模型">准备模型</h2><ul><li><a href="/Running-Qwen-on-the-Win10-platform-with-6GB-of-video-memory">运行环境</a></li></ul><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><code class="hljs powershell"><span class="hljs-comment"># conda create -n llamaConvert python=3.10 git -c conda-forge</span><br><span class="hljs-comment"># conda activate llamaConvert</span><br><span class="hljs-comment"># cd D:\llama</span><br><span class="hljs-comment"># git clone --depth=1 https://github.com/ggerganov/llama.cpp.git</span><br><span class="hljs-comment"># cd llama.cpp</span><br><span class="hljs-comment"># python -m pip install -r requirements.txt</span><br><span class="hljs-comment"># pip install tiktoken</span><br><span class="hljs-variable">$env:HF_ENDPOINT</span>=<span class="hljs-string">&quot;https://hf-mirror.com&quot;</span>; python <span class="hljs-literal">-c</span> <span class="hljs-string">&quot;from huggingface_hub import snapshot_download; snapshot_download(repo_id=&#x27;vivo-ai/BlueLM-7B-Chat-32K&#x27;, local_dir=r&#x27;D:\models\BlueLM-7B&#x27;)&quot;</span><br><span class="hljs-comment"># 还是用 vivo-ai/BlueLM-7B-Chat 吧, 32k的 ntkmixed 长度外推方案不知道怎么改</span><br></code></pre></td></tr></table></figure><ul><li>初始的模型结构</li></ul><figure class="highlight txt"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br></pre></td><td class="code"><pre><code class="hljs txt">BlueLMForCausalLM(<br> (model): BlueLMModel(<br> (embed_tokens): Embedding(100096, 4096, padding_idx=3)<br> (embed_layer_norm): LayerNorm((4096,), eps=1e-06, elementwise_affine=True)<br> (layers): ModuleList(<br> (0-31): 32 x BlueLMDecoderLayer(<br> (self_attn): BlueLMAttention(<br> (q_proj): Linear(in_features=4096, out_features=4096, bias=False)<br> (k_proj): Linear(in_features=4096, out_features=4096, bias=False)<br> (v_proj): Linear(in_features=4096, out_features=4096, bias=False)<br> (o_proj): Linear(in_features=4096, out_features=4096, bias=False)<br> (rotary_emb): BlueLMRotaryEmbedding()<br> )<br> (mlp): BlueLMMLP(<br> (gate_proj): Linear(in_features=4096, out_features=11008, bias=False)<br> (down_proj): Linear(in_features=11008, out_features=4096, bias=False)<br> (up_proj): Linear(in_features=4096, out_features=11008, bias=False)<br> (act_fn): SiLU()<br> (dropout): Dropout(p=0, inplace=False)<br> )<br> 
(input_layernorm): BlueLMRMSNorm()<br> (post_attention_layernorm): BlueLMRMSNorm()<br> )<br> )<br> (norm): BlueLMRMSNorm()<br> )<br> (lm_head): Linear(in_features=4096, out_features=100096, bias=False)<br>)<br></code></pre></td></tr></table></figure><h2 id="归一化-embed">归一化 embed</h2><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">from</span> transformers <span class="hljs-keyword">import</span> AutoModelForCausalLM<br><span class="hljs-keyword">import</span> torch<br><br><span class="hljs-comment"># 提前将 modeling_bluelm.py 中用到 flash_attn 的部分改成 None,反正不真运行,只需要模型结构</span><br>tmp = AutoModelForCausalLM.from_pretrained(<span class="hljs-string">r&#x27;D:\models\BlueLM-7B&#x27;</span>,<br> torch_dtype=torch.bfloat16,<br> trust_remote_code=<span class="hljs-literal">True</span>)<br><br>test_i = torch.arange(<span class="hljs-number">0</span>, <span class="hljs-number">10</span>, dtype=torch.long)<br><br>embedding = tmp.model.embed_tokens<br>layer_norm = tmp.model.embed_layer_norm<br><br>test_o_o = embedding(test_i)<br>test_o_o = layer_norm(test_o_o)<br><br><span class="hljs-keyword">for</span> param <span class="hljs-keyword">in</span> embedding.parameters():<br> <span class="hljs-keyword">if</span> <span class="hljs-built_in">len</span>(param.shape) &gt; <span class="hljs-number">1</span>:<br> param.data = layer_norm(param.data)<br><br>test_o_c = embedding(test_i)<br><br><span class="hljs-built_in">print</span>(torch.allclose(test_o_o, test_o_c, atol=<span class="hljs-number">1e-4</span>))<br><br><span class="hljs-keyword">del</span> tmp.model.embed_layer_norm<br>tmp.save_pretrained(<span class="hljs-string">r&#x27;D:\models\BlueLM&#x27;</span>)<br><span class="hljs-comment"># 记得将缺失的一些文件手动复制一下</span><br><span class="hljs-comment"># 顺便删掉config.json里的rope scaling type</span><br></code></pre></td></tr></table></figure><ul><li>删除 embed_layer_norm 后的结构</li></ul><figure class="highlight txt"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span 
class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br></pre></td><td class="code"><pre><code class="hljs txt">BlueLMForCausalLM(<br> (model): BlueLMModel(<br> (embed_tokens): Embedding(100096, 4096, padding_idx=3)<br> (layers): ModuleList(<br> (0-31): 32 x BlueLMDecoderLayer(<br> (self_attn): BlueLMAttention(<br> (q_proj): Linear(in_features=4096, out_features=4096, bias=False)<br> (k_proj): Linear(in_features=4096, out_features=4096, bias=False)<br> (v_proj): Linear(in_features=4096, out_features=4096, bias=False)<br> (o_proj): Linear(in_features=4096, out_features=4096, bias=False)<br> (rotary_emb): BlueLMRotaryEmbedding()<br> )<br> (mlp): BlueLMMLP(<br> (gate_proj): Linear(in_features=4096, out_features=11008, bias=False)<br> (down_proj): Linear(in_features=11008, out_features=4096, bias=False)<br> (up_proj): Linear(in_features=4096, out_features=11008, bias=False)<br> (act_fn): SiLU()<br> (dropout): Dropout(p=0, inplace=False)<br> )<br> (input_layernorm): BlueLMRMSNorm()<br> (post_attention_layernorm): BlueLMRMSNorm()<br> )<br> )<br> (norm): BlueLMRMSNorm()<br> )<br> (lm_head): Linear(in_features=4096, out_features=100096, bias=False)<br>)<br></code></pre></td></tr></table></figure><h2 id="测试运行">测试运行</h2><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><code class="hljs powershell">conda activate llamaConvert<br><span class="hljs-built_in">cd</span> D:\llama\llama.cpp<br>python convert.py D:\models\BlueLM <span class="hljs-literal">--padvocab</span><br>Wrote D:\models\BlueLM\ggml<span class="hljs-literal">-model-f16</span>.gguf<br>conda activate llamaCpp<br><span class="hljs-built_in">cd</span> D:\llama<span class="hljs-literal">-cublas</span><br>.\quantize.exe D:\models\BlueLM\ggml<span class="hljs-literal">-model-f16</span>.gguf D:\models\BlueLM\ggml<span class="hljs-literal">-model-Q5_K_M</span>.gguf Q5_K_M<br>.\main.exe <span class="hljs-literal">-m</span> D:\models\BlueLM\ggml<span class="hljs-literal">-model-Q5_K_M</span>.gguf <span class="hljs-literal">-ngl</span> <span class="hljs-number">25</span> <span class="hljs-literal">-c</span> <span class="hljs-number">1024</span> <span class="hljs-literal">--interactive-first</span><br></code></pre></td></tr></table></figure>]]></content:encoded>
  119. <category domain="https://hexo.limour.top/tags/%E6%8E%A2%E7%B4%A2/">探索</category>
  120. <category domain="https://hexo.limour.top/tags/llama/">llama</category>
  121. <comments>https://hexo.limour.top/Convert-BlueLM-7B-Chat-to-the-standard-GGUF-model#disqus_thread</comments>
  122. </item>
  123. <item>
  124. <title>【探索】从零开始训练 GPT</title>
  125. <link>https://hexo.limour.top/training-gpt-from-scratch</link>
  126. <guid>https://hexo.limour.top/training-gpt-from-scratch</guid>
  127. <pubDate>Thu, 18 Jan 2024 14:19:11 GMT</pubDate>
  128. <description>探索从零开始训练 GPT 的完整过程:在一台搭载 1660Ti 显卡的笔记本电脑上构建 Tokenizer、定义带有 RoPE 的 Transformer,再到训练、保存模型并可视化训练过程,深入了解每一个步骤,在便携的 1660Ti 笔记本上释放深度学习的潜能。</description>
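<!--
The post below builds RoPE by viewing each (even, odd) pair of query/key components as a complex
number and rotating it by a position-dependent angle (multiplication by a unit complex number), as
in RotaryEmbedding.rotary_emb. A standalone numeric check of that identity with toy dimensions
(plain torch; the names freqs_cis, rope_complex and rope_manual are illustrative):

import torch

head_dim, seq_len, theta = 8, 5, 10000.0
freqs = 1.0 / (theta ** (torch.arange(0, head_dim, 2).float() / head_dim))
t = torch.arange(seq_len).float()
angles = torch.outer(t, freqs)                                   # angle per position and pair
freqs_cis = torch.polar(torch.ones_like(angles), angles)         # unit complex numbers

x = torch.randn(seq_len, head_dim)
x_c = torch.view_as_complex(x.reshape(seq_len, head_dim // 2, 2))
rope_complex = torch.view_as_real(x_c * freqs_cis).flatten(1)    # the view_as_complex trick

cos, sin = angles.cos(), angles.sin()                            # explicit rotation of each pair
x_even, x_odd = x[:, 0::2], x[:, 1::2]
rope_manual = torch.stack((x_even * cos - x_odd * sin,
                           x_even * sin + x_odd * cos), dim=-1).flatten(1)

print(torch.allclose(rope_complex, rope_manual, atol=1e-5))      # True
-->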
  129. <content:encoded><![CDATA[<p><img src="https://img.limour.top/2024/01/18/65a93c6a8065a.webp" alt="训练中..."></p><h2 id="预期结构">预期结构</h2><ul><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL0xpbW91ci1kZXYvSGVsbG9HUFQ=" rel="noopener external nofollow noreferrer">相关代码已经放到 Github</a></li></ul><figure class="highlight txt"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br></pre></td><td class="code"><pre><code class="hljs txt">HelloGPT(<br> (tok_embeddings): Embedding(32765, 768)<br> (rotary_emb): RotaryEmbedding(head_dim=64, max_seq_len=1024)<br> (layers): ModuleList(<br> (0-11): 12 x Decoder(<br> (ln1): RMSNorm(hidden_size=768, eps=1e-06)<br> (attn): Attention(<br> (q_proj): Linear(in_features=768, out_features=768, bias=False)<br> (k_proj): Linear(in_features=768, out_features=768, bias=False)<br> (v_proj): Linear(in_features=768, out_features=768, bias=False)<br> (o_proj): Linear(in_features=768, out_features=768, bias=False)<br> )<br> (ln2): RMSNorm(hidden_size=768, eps=1e-06)<br> (mlp): MLP(<br> (gate_proj): Linear(in_features=768, out_features=1536, bias=False)<br> (up_proj): Linear(in_features=768, out_features=1536, bias=False)<br> (down_proj): Linear(in_features=1536, out_features=768, bias=False)<br> )<br> )<br> )<br> (norm): RMSNorm(hidden_size=768, eps=1e-06)<br> (ln2): Linear(in_features=768, out_features=32765, bias=False)<br>)<br></code></pre></td></tr></table></figure><h2 id="配置环境">配置环境</h2><ul><li><a href="/-ji-lu--an-zhuang-conda-bing-geng-huan-qing-hua-yuan">安装conda</a></li></ul><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><code class="hljs powershell"><span class="hljs-built_in">cd</span> E:\GPT<br>conda install mamba <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br>mamba create <span class="hljs-literal">-n</span> HelloGPT pytorch pytorch<span class="hljs-literal">-cuda</span>=<span class="hljs-number">12.1</span> <span class="hljs-literal">-c</span> pytorch <span class="hljs-literal">-c</span> nvidia <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br>conda activate HelloGPT<br>conda install numpy transformers tiktoken tensorboard sentencepiece<span class="hljs-literal">-python</span> jieba emoji <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br>pip install opencc<span class="hljs-literal">-python-reimplemented</span> <span class="hljs-literal">-i</span> https://pypi.tuna.tsinghua.edu.cn/simple<br>python 
test_cuda.py<br>python test_SPDA.py<br>D:\vscode\Code.exe<br></code></pre></td></tr></table></figure><h2 id="准备数据">准备数据</h2><ul><li>下载 <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9jb2xsZWN0aW9ucy9MaW1vdXIvcjE4LW5vdmVscy1nYWxnYW1lLTY1OThmMTY4OTRjYWRjOWNkY2IzZjNhYg==" rel="noopener external nofollow noreferrer">h-corpus-2023</a></li></ul><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br><span class="line">57</span><br><span class="line">58</span><br><span class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br><span class="line">67</span><br><span class="line">68</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">import</span> os<br><br><span class="hljs-keyword">class</span> <span class="hljs-title class_">Fileset</span>(<span class="hljs-title class_ inherited__">list</span>):<br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__init__</span>(<span class="hljs-params">self, path, ext=<span class="hljs-string">&#x27;&#x27;</span>, _read=<span class="hljs-literal">None</span></span>):<br> <span class="hljs-keyword">if</span> <span class="hljs-built_in">isinstance</span>(path, <span class="hljs-built_in">str</span>):<br> self.root = path<br> self.extend(f <span class="hljs-keyword">for</span> f <span class="hljs-keyword">in</span> os.listdir(self.root) <span class="hljs-keyword">if</span> f.endswith(ext))<br> self._read = _read<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__getitem__</span>(<span 
class="hljs-params">self, index</span>):<br> <span class="hljs-keyword">if</span> <span class="hljs-built_in">isinstance</span>(index, <span class="hljs-built_in">int</span>): <span class="hljs-comment"># index是索引</span><br> <span class="hljs-keyword">if</span> self._read:<br> <span class="hljs-keyword">return</span> self._read(os.path.join(self.root, <span class="hljs-built_in">super</span>().__getitem__(index)))<br> <span class="hljs-keyword">else</span>:<br> <span class="hljs-keyword">return</span> os.path.join(self.root, <span class="hljs-built_in">super</span>().__getitem__(index))<br> <span class="hljs-keyword">else</span>: <span class="hljs-comment"># index是切片</span><br> fileset = Fileset(<span class="hljs-literal">None</span>)<br> fileset.root = self.root<br> fileset._read = self._read<br> fileset.extend(<span class="hljs-built_in">super</span>().__getitem__(index))<br> <span class="hljs-keyword">return</span> fileset<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">getFileName</span>(<span class="hljs-params">self, index</span>):<br> fname, ext = os.path.splitext(<span class="hljs-built_in">super</span>().__getitem__(index))<br> <span class="hljs-keyword">return</span> fname<br><br><br><span class="hljs-keyword">from</span> tokenizer <span class="hljs-keyword">import</span> tokenizer<br>token_eos = <span class="hljs-number">2</span><br><br><br><span class="hljs-keyword">def</span> <span class="hljs-title function_">readOne</span>(<span class="hljs-params">filePath</span>):<br> retn = []<br> <span class="hljs-keyword">with</span> <span class="hljs-built_in">open</span>(file=filePath, encoding=<span class="hljs-string">&#x27;utf-8&#x27;</span>) <span class="hljs-keyword">as</span> f:<br> <span class="hljs-keyword">for</span> line <span class="hljs-keyword">in</span> f:<br> retn += tokenizer.encode(line).ids<br> retn.append(token_eos)<br> <span class="hljs-keyword">return</span> retn<br><br><br><span class="hljs-keyword">class</span> <span class="hljs-title class_">Hcorpus</span>():<br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__init__</span>(<span class="hljs-params">self, path, ext=<span class="hljs-string">&#x27;txt&#x27;</span>, fileset_idx=<span class="hljs-number">0</span>, fileset_sub_idx=<span class="hljs-number">0</span></span>):<br> self.fileset = Fileset(path, ext, readOne)<br> self.fileset_idx = fileset_idx<br> self.fileset_sub_idx = fileset_sub_idx<br> <span class="hljs-keyword">if</span> self.fileset_sub_idx &lt; <span class="hljs-number">0</span>: <span class="hljs-comment"># 再读上一个太复杂了,直接放弃</span><br> self.fileset_sub_idx = <span class="hljs-number">0</span><br> <span class="hljs-keyword">if</span> self.fileset_idx &gt;= <span class="hljs-built_in">len</span>(self.fileset):<br> self.fileset_idx = <span class="hljs-number">0</span><br> self.cache = self.fileset[self.fileset_idx]<br> self.fileset_idx += <span class="hljs-number">1</span><br> self.cache_idx = self.fileset_sub_idx<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__call__</span>(<span class="hljs-params">self, size=<span class="hljs-number">512</span></span>):<br> <span class="hljs-keyword">while</span> <span class="hljs-built_in">len</span>(self.cache) &lt; self.cache_idx + size:<br> <span class="hljs-keyword">if</span> self.fileset_idx &gt;= <span class="hljs-built_in">len</span>(self.fileset):<br> self.fileset_idx = <span class="hljs-number">0</span><br> self.fileset_sub_idx = self.cache_idx - <span 
class="hljs-built_in">len</span>(self.cache)<br> self.cache = self.cache[self.cache_idx:] + self.fileset[self.fileset_idx]<br> self.cache_idx = <span class="hljs-number">0</span><br> self.fileset_idx += <span class="hljs-number">1</span><br> retn = self.cache[self.cache_idx:self.cache_idx + size]<br> self.cache_idx += size<br> self.fileset_sub_idx += size<br> <span class="hljs-keyword">return</span> retn<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__repr__</span>(<span class="hljs-params">self</span>):<br> <span class="hljs-keyword">return</span> <span class="hljs-string">f&quot;Hcorpus(r&#x27;<span class="hljs-subst">&#123;self.fileset.root&#125;</span>&#x27;, fileset_idx=<span class="hljs-subst">&#123;self.fileset_idx-<span class="hljs-number">1</span>&#125;</span>, fileset_sub_idx=<span class="hljs-subst">&#123;self.fileset_sub_idx&#125;</span>)&quot;</span><br></code></pre></td></tr></table></figure><h2 id="训练Tokenizer">训练Tokenizer</h2><ul><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9kb2NzL3Rva2VuaXplcnMvcXVpY2t0b3Vy" rel="noopener external nofollow noreferrer">tokenizer 包的文档</a></li><li>繁体转换成简体:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL0xpbW91ci1kZXYvSGVsbG9HUFQvYmxvYi9tYWluL3RyYWluX3Rva2VuaXplcl9wcmUucHk=" rel="noopener external nofollow noreferrer">train_tokenizer_pre.py</a></li><li>获取常用 emoji:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL0xpbW91ci1kZXYvSGVsbG9HUFQvYmxvYi9tYWluL3RtcF9lbW9qaS5weQ==" rel="noopener external nofollow noreferrer">tmp_emoji.py</a></li><li>分词统计词频:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL0xpbW91ci1kZXYvSGVsbG9HUFQvYmxvYi9tYWluL3RyYWluX3Rva2VuaXplcl9qaWViYS5weQ==" rel="noopener external nofollow noreferrer">tokenizer_jieba.py</a></li><li>区分词性并构造 BPE 语料:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL0xpbW91ci1kZXYvSGVsbG9HUFQvYmxvYi9tYWluL3RyYWluX3Rva2VuaXplcl9qaWViYV9zdGF0aXN0aWNzLnB5" rel="noopener external nofollow noreferrer">train_tokenizer_jieba_statistics.py</a></li><li>训练 BPE 模型:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL0xpbW91ci1kZXYvSGVsbG9HUFQvYmxvYi9tYWluL3RyYWluX3Rva2VuaXplci5weQ==" rel="noopener external nofollow noreferrer">train_tokenizer.py</a></li><li>最终训练好的 BPE 模型:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL0xpbW91ci1kZXYvSGVsbG9HUFQvYmxvYi9tYWluL0hlbGxvQlBFLnRva2VuaXplci5qc29u" rel="noopener external nofollow noreferrer">HelloBPE.tokenizer.json</a></li></ul><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">from</span> tokenizers <span class="hljs-keyword">import</span> Tokenizer<br>tokenizer = Tokenizer.from_file(<span class="hljs-string">&quot;HelloBPE.tokenizer.json&quot;</span>)<br></code></pre></td></tr></table></figure><h2 id="定义模型">定义模型</h2><h3 id="定义-Decoder">定义 Decoder</h3><h4 id="定义-RMSnorm">定义 RMSnorm</h4><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">class</span> <span class="hljs-title class_">RMSNorm</span>(nn.Module):<br> <span 
class="hljs-keyword">def</span> <span class="hljs-title function_">__init__</span>(<span class="hljs-params">self, dim: <span class="hljs-built_in">int</span>, eps: <span class="hljs-built_in">float</span> = <span class="hljs-number">1e-6</span></span>):<br> <span class="hljs-built_in">super</span>().__init__()<br> self.eps = eps<br> self.weight = nn.Parameter(torch.ones(dim))<br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">forward</span>(<span class="hljs-params">self, x</span>):<br> x = x * torch.rsqrt(x.<span class="hljs-built_in">pow</span>(<span class="hljs-number">2</span>).mean(-<span class="hljs-number">1</span>, keepdim=<span class="hljs-literal">True</span>) + self.eps)<br> <span class="hljs-keyword">return</span> x * self.weight<br></code></pre></td></tr></table></figure><h4 id="定义-RoPE">定义 RoPE</h4><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">class</span> <span class="hljs-title class_">RotaryEmbedding</span>(nn.Module):<br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__init__</span>(<span class="hljs-params">self, head_dim: <span class="hljs-built_in">int</span>, max_seq_len: <span class="hljs-built_in">int</span>, device=device, theta: <span class="hljs-built_in">float</span> = <span class="hljs-number">10000.0</span></span>):<br> <span class="hljs-built_in">super</span>().__init__()<br> self.head_dim = head_dim<br> self.set_max_seq_len(max_seq_len, device, theta)<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">set_max_seq_len</span>(<span class="hljs-params">self, max_seq_len: <span class="hljs-built_in">int</span>, device=device, theta: <span class="hljs-built_in">float</span> = <span class="hljs-number">10000.0</span></span>):<br> self.max_seq_len = max_seq_len<br> freqs = <span class="hljs-number">1.0</span> / (theta ** (torch.arange(<span class="hljs-number">0</span>, self.head_dim, <span class="hljs-number">2</span>).<span class="hljs-built_in">float</span>().to(device) / self.head_dim))<br> t = torch.arange(max_seq_len, device=device) <span class="hljs-comment"># type: ignore</span><br> freqs = torch.outer(t, freqs).<span class="hljs-built_in">float</span>() <span class="hljs-comment"># 外积</span><br> self.freqs_cis = torch.polar(torch.ones_like(freqs), freqs) <span class="hljs-comment"># 复数,模 1,角度 freqs</span><br> self.freqs_cis.requires_grad = <span class="hljs-literal">False</span> <span class="hljs-comment"># filter(lambda p : p.requires_grad, model.parameters())</span><br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">rotary_emb</span>(<span class="hljs-params">self, x</span>):<br> x_ = torch.view_as_complex(x.<span 
class="hljs-built_in">float</span>().reshape(*x.shape[:-<span class="hljs-number">1</span>], -<span class="hljs-number">1</span>, <span class="hljs-number">2</span>))<br> x_out = torch.view_as_real(x_ * self.local_freqs_cis).flatten(<span class="hljs-number">3</span>)<br> <span class="hljs-keyword">return</span> x_out.type_as(x)<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">forward</span>(<span class="hljs-params">self, start_pos: <span class="hljs-built_in">int</span>, seqlen: <span class="hljs-built_in">int</span></span>):<br> self.local_freqs_cis = self.freqs_cis[start_pos: start_pos + seqlen].view(<span class="hljs-number">1</span>, seqlen, <span class="hljs-number">1</span>, -<span class="hljs-number">1</span>) <span class="hljs-comment"># cacheKV 相关,可忽略</span><br> self.local_freqs_cis.requires_grad = <span class="hljs-literal">False</span><br> <span class="hljs-keyword">return</span> self.rotary_emb<br></code></pre></td></tr></table></figure><h4 id="定义-Attention">定义 Attention</h4><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">class</span> <span class="hljs-title class_">Attention</span>(nn.Module):<br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__init__</span>(<span class="hljs-params">self, hidden_size, n_heads, cacheKV, max_batch_size, max_seq_len, device=device</span>):<br> <span class="hljs-built_in">super</span>().__init__()<br> self.n_heads = n_heads<br> self.head_dim = hidden_size // n_heads<br> self.q_proj = nn.Linear(hidden_size, hidden_size, bias=<span class="hljs-literal">False</span>)<br> self.k_proj = nn.Linear(hidden_size, hidden_size, bias=<span class="hljs-literal">False</span>)<br> self.v_proj = nn.Linear(hidden_size, hidden_size, bias=<span class="hljs-literal">False</span>)<br> self.o_proj = nn.Linear(hidden_size, hidden_size, bias=<span class="hljs-literal">False</span>)<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">forward</span>(<span class="hljs-params">self, hidden_states, rotary_emb, start_pos=<span class="hljs-number">0</span>, mask=<span class="hljs-literal">None</span>, is_causal=<span class="hljs-literal">True</span></span>):<br> bsz, seqlen, hidden_size = hidden_states.shape<br><br> q = self.q_proj(hidden_states)<br> k = self.k_proj(hidden_states)<br> v = self.v_proj(hidden_states)<br><br> q = q.view(bsz, seqlen, self.n_heads, self.head_dim)<br> k = 
k.view(bsz, seqlen, self.n_heads, self.head_dim)<br> v = v.view(bsz, seqlen, self.n_heads, self.head_dim)<br><br> q = rotary_emb(q)<br> k = rotary_emb(k)<br><br> q = q.transpose(<span class="hljs-number">1</span>, <span class="hljs-number">2</span>) <span class="hljs-comment"># (bs, n_heads, seqlen, head_dim)</span><br> k = k.transpose(<span class="hljs-number">1</span>, <span class="hljs-number">2</span>) <span class="hljs-comment"># (bs, n_local_heads, cache_len + seqlen, head_dim)</span><br> v = v.transpose(<span class="hljs-number">1</span>, <span class="hljs-number">2</span>) <span class="hljs-comment"># (bs, n_local_heads, cache_len + seqlen, head_dim)</span><br><br> output = F.scaled_dot_product_attention(q, k, v, attn_mask=mask, is_causal=is_causal)<br><br> output = output.transpose(<span class="hljs-number">1</span>, <span class="hljs-number">2</span>).contiguous().view(bsz, seqlen, hidden_size)<br> <span class="hljs-keyword">return</span> self.o_proj(output)<br></code></pre></td></tr></table></figure><h4 id="定义-MLP">定义 MLP</h4><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">class</span> <span class="hljs-title class_">MLP</span>(nn.Module):<br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__init__</span>(<span class="hljs-params">self, hidden_size</span>):<br> <span class="hljs-built_in">super</span>().__init__()<br> intermediate_size = <span class="hljs-built_in">int</span>(<span class="hljs-number">2</span> * hidden_size)<br> self.gate_proj = nn.Linear(hidden_size, intermediate_size, bias=<span class="hljs-literal">False</span>)<br> self.up_proj = nn.Linear(hidden_size, intermediate_size, bias=<span class="hljs-literal">False</span>)<br> self.down_proj = nn.Linear(intermediate_size, hidden_size, bias=<span class="hljs-literal">False</span>)<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">forward</span>(<span class="hljs-params">self, x</span>):<br> gate = F.silu(self.gate_proj(x))<br> intermediate_states = self.up_proj(x)<br> <span class="hljs-keyword">return</span> self.down_proj(gate * intermediate_states)<br></code></pre></td></tr></table></figure><h4 id="组装-Decoder">组装 Decoder</h4><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">class</span> <span class="hljs-title class_">Decoder</span>(nn.Module):<br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__init__</span>(<span class="hljs-params">self, hidden_size, n_heads, cacheKV, max_batch_size, max_seq_len</span>):<br> <span class="hljs-built_in">super</span>().__init__()<br> self.ln1 = RMSNorm(hidden_size)<br> self.attn = 
Attention(hidden_size, n_heads, cacheKV, max_batch_size, max_seq_len)<br> self.ln2 = RMSNorm(hidden_size)<br> self.mlp = MLP(hidden_size)<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">forward</span>(<span class="hljs-params">self, x, rotary_emb, start_pos, mask=<span class="hljs-literal">None</span>, is_causal=<span class="hljs-literal">True</span></span>):<br> x = x + self.attn(self.ln1(x), rotary_emb, start_pos, mask, is_causal)<br> <span class="hljs-keyword">return</span> x + self.mlp(self.ln2(x))<br></code></pre></td></tr></table></figure><h3 id="组装模型">组装模型</h3><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">class</span> <span class="hljs-title class_">HelloGPT</span>(nn.Module):<br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">__init__</span>(<span class="hljs-params">self, vocab_size=<span class="hljs-number">32765</span>, hidden_size=<span class="hljs-number">768</span>, n_heads=<span class="hljs-number">12</span>, max_seq_len=<span class="hljs-number">1024</span>, n_layers=<span class="hljs-number">12</span>, cacheKV=<span class="hljs-literal">False</span>, max_batch_size=<span class="hljs-number">1</span></span>):<br> <span class="hljs-built_in">super</span>().__init__()<br> <span class="hljs-comment"># hidden_size &gt; 8.33 * ln(vocab_size)</span><br> self.tok_embeddings = nn.Embedding(vocab_size, hidden_size)<br> self.rotary_emb = RotaryEmbedding(hidden_size // n_heads, max_seq_len * <span class="hljs-number">2</span>)<br> self.rotary_emb.requires_grad = <span class="hljs-literal">False</span><br> self.layers = nn.ModuleList()<br> <span class="hljs-keyword">for</span> layer_id <span class="hljs-keyword">in</span> <span class="hljs-built_in">range</span>(n_layers):<br> self.layers.append(Decoder(hidden_size, n_heads, cacheKV, max_batch_size, max_seq_len))<br> self.norm = RMSNorm(hidden_size)<br> self.ln2 = nn.Linear(hidden_size, vocab_size, bias=<span class="hljs-literal">False</span>)<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">forward</span>(<span class="hljs-params">self, input_ids: torch.Tensor, start_pos=<span class="hljs-number">0</span>, no_mask=<span class="hljs-literal">True</span></span>):<br> _bsz, seqlen = input_ids.shape<br> h = self.tok_embeddings(input_ids)<br><br> <span class="hljs-comment"># 预计算,减少每一层的重复计算</span><br> rotary_emb = self.rotary_emb(start_pos, seqlen)<br> <span class="hljs-keyword">for</span> layer <span class="hljs-keyword">in</span> self.layers:<br> h = layer(h, rotary_emb, start_pos)<br><br> h = self.norm(h)<br> h = self.ln2(h)<br> <span 
class="hljs-keyword">return</span> h.<span class="hljs-built_in">float</span>()<br></code></pre></td></tr></table></figure><h2 id="训练模型">训练模型</h2><h3 id="数据载入">数据载入</h3><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><code class="hljs python">data = Hcorpus(<span class="hljs-string">r&#x27;D:\datasets\h-corpus&#x27;</span>)<br><span class="hljs-keyword">def</span> <span class="hljs-title function_">get_batch</span>(<span class="hljs-params">size=<span class="hljs-number">512</span>, bsz=<span class="hljs-number">8</span></span>):<br> x = []<br> y = []<br> <span class="hljs-keyword">for</span> i <span class="hljs-keyword">in</span> <span class="hljs-built_in">range</span>(bsz):<br> tmp = data(size+<span class="hljs-number">1</span>)<br> x.append(tmp[:size])<br> y.append(tmp[<span class="hljs-number">1</span>:])<br> <span class="hljs-keyword">return</span> torch.tensor(x).to(device), torch.tensor(y).to(device)<br></code></pre></td></tr></table></figure><h3 id="模型载入">模型载入</h3><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs python">model = HelloGPT(n_layers=<span class="hljs-number">8</span>, max_seq_len=<span class="hljs-number">768</span>)<br>model.to(device)<br></code></pre></td></tr></table></figure><h3 id="训练模型-2">训练模型</h3><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-comment">## 初始化训练器</span><br>criterion = nn.CrossEntropyLoss() <span class="hljs-comment"># 交叉熵损失函数</span><br>optimizer = torch.optim.Adam(train_parameters, lr=<span class="hljs-number">6e-4</span>) <span class="hljs-comment"># Adam 优化器</span><br>scheduler = CosineAnnealingWarmRestarts(optimizer, T_0=<span class="hljs-number">5</span>, T_mult=<span class="hljs-number">2</span>) <span class="hljs-comment"># 余弦退火学习率</span><br>torch.manual_seed(<span class="hljs-number">1337</span>) <span class="hljs-comment"># 魔术随机种子</span><br><br>total_loss = <span class="hljs-number">0</span><br>print_iter = <span class="hljs-number">20</span><br><span class="hljs-keyword">for</span> epoch <span class="hljs-keyword">in</span> <span class="hljs-built_in">range</span>(<span class="hljs-number">1</span>, <span class="hljs-number">100001</span>):<br> optimizer.zero_grad(set_to_none=<span class="hljs-literal">True</span>) <span class="hljs-comment"># 清空梯度,节省显存</span><br> x, y = 
get_batch(size=<span class="hljs-number">384</span>, bsz=<span class="hljs-number">4</span>) <span class="hljs-comment"># x 是训练语料 y 是 x 移动了一位,当做预测目标</span><br> y_ = model(x) <span class="hljs-comment"># 通过 x 预测的 y</span><br> loss = criterion(y_.view(-<span class="hljs-number">1</span>, <span class="hljs-number">32765</span>), y.view(-<span class="hljs-number">1</span>)) <span class="hljs-comment"># 计算损失</span><br> loss.backward() <span class="hljs-comment"># 反向传播梯度</span><br> torch.nn.utils.clip_grad_norm_(train_parameters, <span class="hljs-number">0.5</span>) <span class="hljs-comment"># 梯度裁剪,减轻过拟合</span><br> optimizer.step() <span class="hljs-comment"># 通过梯度优化训练参数</span><br> scheduler.step() <span class="hljs-comment"># 计算下一步的学习率</span><br> total_loss += loss <span class="hljs-comment"># 累计损失</span><br><br> <span class="hljs-keyword">if</span> epoch % print_iter == <span class="hljs-number">0</span>:<br> <span class="hljs-built_in">print</span>(data)<br> <span class="hljs-built_in">print</span>(<span class="hljs-string">f&#x27;epoch: <span class="hljs-subst">&#123;epoch&#125;</span> lr: <span class="hljs-subst">&#123;scheduler.get_last_lr()[<span class="hljs-number">0</span>]:<span class="hljs-number">.4</span>e&#125;</span> loss: <span class="hljs-subst">&#123;total_loss / print_iter:<span class="hljs-number">.4</span>e&#125;</span>&#x27;</span>)<br> total_loss = <span class="hljs-number">0</span><br></code></pre></td></tr></table></figure><h3 id="保存读取">保存读取</h3><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">with</span> <span class="hljs-built_in">open</span>(<span class="hljs-string">&#x27;tmp_training.pkl&#x27;</span>, <span class="hljs-string">&#x27;rb&#x27;</span>) <span class="hljs-keyword">as</span> file:<br> epoch = pickle.load(file) <span class="hljs-comment"># 读取 epoch 位置</span><br> tmp_fileset_idx = pickle.load(file) <span class="hljs-comment"># 读取 data 位置</span><br> tmp_fileset_sub_idx = pickle.load(file)<br><span class="hljs-comment"># 恢复数据位置</span><br>data = Hcorpus(<span class="hljs-string">r&#x27;D:\datasets\h-corpus&#x27;</span>, fileset_idx=tmp_fileset_idx-<span class="hljs-number">1</span>, fileset_sub_idx=tmp_fileset_sub_idx)<br>model = torch.load(<span class="hljs-string">f&#x27;tmp_model_<span class="hljs-subst">&#123;epoch&#125;</span>.pth&#x27;</span>) <span class="hljs-comment"># 恢复模型</span><br><span class="hljs-built_in">print</span>(<span class="hljs-string">f&#x27;start from epoch: <span class="hljs-subst">&#123;epoch&#125;</span> data: <span class="hljs-subst">&#123;data&#125;</span>&#x27;</span>)<br><br>save_iter = <span class="hljs-number">5000</span><br><span class="hljs-keyword">for</span> epoch <span class="hljs-keyword">in</span> <span class="hljs-built_in">range</span>(<span class="hljs-number">1</span>, <span 
class="hljs-number">100001</span>):<br> <span class="hljs-keyword">pass</span><br> <span class="hljs-keyword">if</span> epoch % save_iter == <span class="hljs-number">0</span>:<br> optimizer.zero_grad(set_to_none=<span class="hljs-literal">True</span>) <span class="hljs-comment"># 清空梯度,节省显存</span><br> <span class="hljs-keyword">with</span> <span class="hljs-built_in">open</span>(<span class="hljs-string">&#x27;tmp_training.pkl&#x27;</span>, <span class="hljs-string">&#x27;wb&#x27;</span>) <span class="hljs-keyword">as</span> file:<br> pickle.dump(epoch, file) <span class="hljs-comment"># 保存 epoch 位置</span><br> pickle.dump(data.fileset_idx, file) <span class="hljs-comment"># 保存 data 位置</span><br> pickle.dump(data.fileset_sub_idx, file)<br> torch.save(model, <span class="hljs-string">f&#x27;tmp_model_<span class="hljs-subst">&#123;epoch&#125;</span>.pth&#x27;</span>) <span class="hljs-comment"># 保存模型</span><br> <span class="hljs-built_in">print</span>(<span class="hljs-string">f&#x27;save to tmp_model_<span class="hljs-subst">&#123;epoch&#125;</span>.pth&#x27;</span>)<br></code></pre></td></tr></table></figure><h3 id="可视化">可视化</h3><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><code class="hljs python">writer = SummaryWriter(<span class="hljs-string">&#x27;logs&#x27;</span>) <span class="hljs-comment"># tensorboard --logdir logs</span><br><span class="hljs-keyword">for</span> epoch <span class="hljs-keyword">in</span> <span class="hljs-built_in">range</span>(<span class="hljs-number">1</span>, <span class="hljs-number">100001</span>):<br> <span class="hljs-keyword">pass</span><br> writer.add_scalar(<span class="hljs-string">&#x27;lr&#x27;</span>, scheduler.get_last_lr()[<span class="hljs-number">0</span>], epoch)<br> writer.add_scalar(<span class="hljs-string">&#x27;loss&#x27;</span>, loss, epoch)<br> <span class="hljs-keyword">if</span> epoch % print_iter == <span class="hljs-number">0</span>:<br> <span class="hljs-keyword">pass</span><br> writer.add_scalar(<span class="hljs-string">&#x27;total_loss&#x27;</span>, total_loss / print_iter, epoch)<br>writer.close()<br></code></pre></td></tr></table></figure><h2 id="附加-streaming-llm">附加 streaming_llm</h2><figure class="highlight python"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span 
class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br></pre></td><td class="code"><pre><code class="hljs python"><span class="hljs-keyword">class</span> <span class="hljs-title class_">RotaryEmbedding</span>(nn.Module):<br> <span class="hljs-keyword">pass</span><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">inverse_rotary_emb</span>(<span class="hljs-params">self, x</span>):<br> x_ = torch.view_as_complex(x.<span class="hljs-built_in">float</span>().reshape(*x.shape[:-<span class="hljs-number">1</span>], -<span class="hljs-number">1</span>, <span class="hljs-number">2</span>))<br> x_out = torch.view_as_real(x_ * self.local_freqs_cis_inverse).flatten(<span class="hljs-number">3</span>)<br> <span class="hljs-keyword">return</span> x_out.type_as(x)<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">inverse_forward</span>(<span class="hljs-params">self, start_pos: <span class="hljs-built_in">int</span>, seqlen: <span class="hljs-built_in">int</span></span>):<br> self.local_freqs_cis_inverse = self.freqs_cis[start_pos: start_pos + seqlen].view(<span class="hljs-number">1</span>, seqlen, <span class="hljs-number">1</span>, -<span class="hljs-number">1</span>) <span class="hljs-comment"># cacheKV 相关,可忽略</span><br> self.local_freqs_cis_inverse = self.local_freqs_cis_inverse.conj() <span class="hljs-comment"># 乘上共轭就旋转回去了</span><br> self.local_freqs_cis.requires_grad = <span class="hljs-literal">False</span><br> <span class="hljs-keyword">return</span> self.inverse_rotary_emb<br><br><span class="hljs-keyword">class</span> <span class="hljs-title class_">Attention</span>(nn.Module):<br> <span class="hljs-keyword">pass</span><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">forward</span>(<span class="hljs-params">self, hidden_states, rotary_emb, start_pos=<span class="hljs-number">0</span>, mask=<span class="hljs-literal">None</span>, is_causal=<span class="hljs-literal">True</span></span>):<br> <span class="hljs-keyword">pass</span><br> <span class="hljs-keyword">if</span> self.cacheKV: <span class="hljs-comment"># cacheKV 相关,可忽略</span><br> self.cache_k[:bsz, start_pos: start_pos + seqlen] = k<br> self.cache_v[:bsz, start_pos: start_pos + seqlen] = v<br> k = self.cache_k[:bsz, : start_pos + seqlen]<br> v = self.cache_v[:bsz, : start_pos + seqlen]<br><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">streaming_llm</span>(<span class="hljs-params">self, start_pos, seqlen, to_pos, inverse_rotary_emb, rotary_emb, bsz</span>):<br> k = self.cache_k[:bsz, start_pos: start_pos + seqlen]<br> v = self.cache_v[:bsz, start_pos: start_pos + seqlen]<br> k = inverse_rotary_emb(k)<br> k = rotary_emb(k)<br> self.cache_k[:bsz, to_pos: to_pos + seqlen] = k<br> self.cache_v[:bsz, to_pos: to_pos + seqlen] = v<br><br><span class="hljs-keyword">class</span> <span class="hljs-title class_">HelloGPT</span>(nn.Module):<br> <span class="hljs-keyword">pass</span><br> <span class="hljs-keyword">def</span> <span class="hljs-title function_">streaming_llm</span>(<span class="hljs-params">self, start_pos, seqlen, to_pos, max_batch_size=<span class="hljs-number">1</span></span>):<br> rotary_emb = self.rotary_emb(to_pos, seqlen)<br> inverse_rotary_emb = self.rotary_emb.inverse_forward(start_pos, seqlen)<br> <span 
class="hljs-keyword">for</span> layer <span class="hljs-keyword">in</span> self.layers:<br> layer.attn.streaming_llm(start_pos, seqlen, to_pos, inverse_rotary_emb, rotary_emb, max_batch_size)<br></code></pre></td></tr></table></figure>]]></content:encoded>
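<!-- A minimal, hypothetical greedy-decoding sketch for the HelloGPT checkpoint trained above; the checkpoint filename, device string, and prompt are assumptions, not part of the original post. It only relies on details shown above: the HelloBPE tokenizer file, torch.save(model, ...) checkpoints, and the model returning logits of shape (bsz, seqlen, 32765).

import torch
from tokenizers import Tokenizer

device = 'cuda'  # assumption: the same single GPU device used for training above
tokenizer = Tokenizer.from_file('HelloBPE.tokenizer.json')
model = torch.load('tmp_model_5000.pth')  # any checkpoint written by the save loop above
model.eval()

@torch.no_grad()
def generate(prompt: str, max_new_tokens: int = 64) -> str:
    ids = tokenizer.encode(prompt).ids
    for _ in range(max_new_tokens):
        x = torch.tensor([ids], dtype=torch.long, device=device)
        logits = model(x)                      # (1, seqlen, 32765)
        next_id = int(logits[0, -1].argmax())  # greedy choice of the next token
        ids.append(next_id)
    return tokenizer.decode(ids)

print(generate('你好'))
-->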
<category domain="https://hexo.limour.top/tags/%E6%8E%A2%E7%B4%A2/">探索</category>
<category domain="https://hexo.limour.top/tags/llama/">llama</category>
<comments>https://hexo.limour.top/training-gpt-from-scratch#disqus_thread</comments>
</item>
<item>
<title>【避坑】Azure AI 避免反向薅羊毛</title>
<link>https://hexo.limour.top/Azure-AI-prevents-reverse-wool-shearing</link>
<guid>https://hexo.limour.top/Azure-AI-prevents-reverse-wool-shearing</guid>
<pubDate>Tue, 09 Jan 2024 05:55:40 GMT</pubDate>
<description>&lt;h2 id=&quot;起因&quot;&gt;起因&lt;/h2&gt;
&lt;p&gt;今天收到 Azure 的付费邮件,一看账单,好家伙,24.54$ ,比上个月暴涨 622%,给我 CPU 干烧了。&lt;/p&gt;
&lt;p&gt;赶紧去成本分析里按资源分类看上个月的扣费详情,然后就看到两个 10.33$ 的 &lt;code&gt;Contai</description>
  142. <content:encoded><![CDATA[<h2 id="起因">起因</h2><p>今天收到 Azure 的付费邮件,一看账单,好家伙,24.54$ ,比上个月暴涨 622%,给我 CPU 干烧了。</p><p>赶紧去成本分析里按资源分类看上个月的扣费详情,然后就看到两个 10.33$ 的 <code>Container Registry</code>,分别位于我在 <a href="https://hexo.limour.top/go/#aHR0cDovL2FpLmF6dXJlLmNvbS8=" rel="noopener external nofollow noreferrer">Azure AI Studio</a> 里的两个不同项目所在区域。</p><p>一顿折腾,发现这个 Container Registry,有一年的免费试用期,但是免费限额是 31/个/天,一个 15 天刚好是 10.33$ 。</p><p>这 Azure 不讲武德,这样免费,头半个月根本不知道这东西要收费,等月末美滋滋去付账单时钱都已经扣完了。。。</p><p>特别是,这东西似乎是 Azure AI Studio 自动开通的,我根本没有用到过它。心情更糟了。</p><p><img src="https://img.limour.top/2024/01/09/659ce07c76fd0.webp" alt=""></p><h2 id="解决方案">解决方案</h2><p>赶紧去资源组里找到这两个<code>容器注册表</code>,全给删了。删除后不会对 Azure AI 的使用产生影响。</p><p>然后是想办法提工单,看能不能把这钱退回来。</p><p><img src="https://img.limour.top/2024/01/09/659ce568e9756.webp" alt="最后保留的服务,不知道哪些还可以删"></p><h2 id="工单结果">工单结果</h2><blockquote><p>透过案件了解到Container Registry是您不清楚的情况下创建的,且您已经将此资源进行了删除。考虑到您是首次使用Azure产品较不熟悉,且已经将资源删除,经过竭力向主管团队申请,现为您申请了相关费用的减免,即:<br>12/1/2023-12/31/2023期间由Container Registry – Standard产生的费用20.66 USD已经申请退回至您的信用卡,依据银行流程,款项约需要7-21个工作日抵达您的账户,届时请您查看。<br>同时,我们也查看了您当前的计费周期(1/1/2024-1/31/2024)的使用量报表,Container Registry – Standard未产生费用,还请您放心。</p></blockquote>]]></content:encoded>
<category domain="https://hexo.limour.top/tags/openai/">openai</category>
<comments>https://hexo.limour.top/Azure-AI-prevents-reverse-wool-shearing#disqus_thread</comments>
</item>
<item>
<title>【记录】win10平台6G显存运行Qwen-1.8B</title>
<link>https://hexo.limour.top/Running-Qwen-on-the-Win10-platform-with-6GB-of-video-memory</link>
<guid>https://hexo.limour.top/Running-Qwen-on-the-Win10-platform-with-6GB-of-video-memory</guid>
<pubDate>Mon, 01 Jan 2024 03:11:36 GMT</pubDate>
<description>&lt;p&gt;&lt;a href=&quot;https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL2dnZXJnYW5vdi9sbGFtYS5jcHA=&quot; rel=&quot;noopener external nofollow noreferrer&quot;&gt;Ll</description>
  152. <content:encoded><![CDATA[<p><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL2dnZXJnYW5vdi9sbGFtYS5jcHA=" rel="noopener external nofollow noreferrer">Llama.cpp</a> 能 CPU &amp; GPU 环境混合推理,这里记录一下在 windows10 平台上运行 <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9Rd2VuL1F3ZW4tMV84Qg==" rel="noopener external nofollow noreferrer">Qwen-1.8B</a> 的过程,显卡是 1660Ti 。</p><h2 id="准备模型">准备模型</h2><ul><li><a href="/-ji-lu--an-zhuang-conda-bing-geng-huan-qing-hua-yuan">安装conda</a></li><li><a href="/Use-Tunnel-to-speed-up-the-connection-of-VPS">Tun模式</a>(管理员权限)</li></ul><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><code class="hljs powershell">conda create <span class="hljs-literal">-n</span> llamaConvert python=<span class="hljs-number">3.10</span> git <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br>conda activate llamaConvert<br><span class="hljs-built_in">cd</span> D:\llama<br>git clone <span class="hljs-literal">--depth</span>=<span class="hljs-number">1</span> https://github.com/ggerganov/llama.cpp.git<br><span class="hljs-built_in">cd</span> llama.cpp<br>python <span class="hljs-literal">-m</span> pip install <span class="hljs-literal">-r</span> requirements.txt<br>pip install tiktoken<br></code></pre></td></tr></table></figure><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><code class="hljs powershell">python <span class="hljs-literal">-c</span> <span class="hljs-string">&quot;from huggingface_hub import snapshot_download; snapshot_download(repo_id=&#x27;Qwen/Qwen-1_8B-Chat&#x27;, local_dir=r&#x27;D:\qwen&#x27;, ignore_patterns=[&#x27;*.h5&#x27;, &#x27;*.ot&#x27;, &#x27;*.msgpack&#x27;, &#x27;*.safetensors&#x27;])&quot;</span><br><span class="hljs-built_in">cd</span> D:\qwen<br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;model-00001-of-00002.safetensors&#x27;</span> <span class="hljs-string">&quot;https://huggingface.co/Qwen/Qwen-1_8B-Chat/resolve/main/model-00001-of-00002.safetensors?download=true&quot;</span><br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;model-00002-of-00002.safetensors&#x27;</span> <span class="hljs-string">&quot;https://huggingface.co/Qwen/Qwen-1_8B-Chat/resolve/main/model-00002-of-00002.safetensors?download=true&quot;</span><br></code></pre></td></tr></table></figure><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs powershell"><span class="hljs-built_in">cd</span> D:\llama\llama.cpp<br>python <span class="hljs-built_in">convert-hf</span><span class="hljs-literal">-to-gguf</span>.py D:\qwen<br><span class="hljs-comment"># Model successfully exported to 
&#x27;D:\qwen\ggml-model-f16.gguf&#x27;</span><br></code></pre></td></tr></table></figure><h2 id="运行模型">运行模型</h2><ul><li>下载 <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL2dnZXJnYW5vdi9sbGFtYS5jcHAvcmVsZWFzZXM=" rel="noopener external nofollow noreferrer">llama-b1732-bin-win-cublas-cu12.2.0-x64.zip</a></li><li>提取文件到 <code>D:\llama</code></li></ul><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><code class="hljs powershell">conda create <span class="hljs-literal">-n</span> llamaCpp libcublas cuda<span class="hljs-literal">-toolkit</span> git <span class="hljs-literal">-c</span> nvidia <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br>conda activate llamaCpp<br><span class="hljs-built_in">cd</span> D:\llama ; .\main.exe <span class="hljs-comment">## 检查能否正确运行</span><br><span class="hljs-built_in">cd</span> D:\llama ; .\quantize.exe <span class="hljs-literal">--help</span> <span class="hljs-comment">## 自己决定量化方式</span><br>.\quantize.exe D:\qwen\ggml<span class="hljs-literal">-model-f16</span>.gguf .\qwen<span class="hljs-literal">-1_8-f16</span>.gguf <span class="hljs-built_in">COPY</span><br>.\server.exe <span class="hljs-literal">-m</span> .\qwen<span class="hljs-literal">-1_8-f16</span>.gguf <span class="hljs-literal">-c</span> <span class="hljs-number">4096</span> <span class="hljs-literal">--n-gpu-layers</span> <span class="hljs-number">50</span> <span class="hljs-comment">## 调节 n-gpu-layers 平衡 CPU &amp; GPU</span><br></code></pre></td></tr></table></figure><ul><li>访问 <code>http://127.0.0.1:8080</code> 选择 <code>Completion</code> 进行测试</li></ul><h2 id="微调模型">微调模型</h2><ul><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9kYXRhc2V0cy9hNjg2ZDM4MC9oLWNvcnB1cy0yMDIz" rel="noopener external nofollow noreferrer">h-corpus数据集</a></li><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL1F3ZW5MTS9Rd2VuL2Jsb2IvbWFpbi9SRUFETUVfQ04ubWQjJUU1JUJFJUFFJUU4JUIwJTgz" rel="noopener external nofollow noreferrer">官方微调教程</a></li></ul><h2 id="附加-Yi-6B-Chat">附加 Yi-6B-Chat</h2><p><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby8wMS1haS9ZaS02Qi1DaGF0" rel="noopener external nofollow noreferrer">Yi-6B</a>是零一万物开源的双语语言模型,经过3T多语种语料库的训练,在语言理解、常识推理、阅读理解等方面有一定潜力。</p><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><code class="hljs powershell"><span class="hljs-built_in">cd</span> D:\models\<span class="hljs-number">01</span>yi<br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;model-00001-of-00003.safetensors&#x27;</span> <span class="hljs-string">&quot;https://huggingface.co/01-ai/Yi-6B-Chat/resolve/main/model-00001-of-00003.safetensors?download=true&quot;</span><br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span 
class="hljs-string">&#x27;model-00002-of-00003.safetensors&#x27;</span> <span class="hljs-string">&quot;https://huggingface.co/01-ai/Yi-6B-Chat/resolve/main/model-00002-of-00003.safetensors?download=true&quot;</span><br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;model-00003-of-00003.safetensors&#x27;</span> https://huggingface.co/<span class="hljs-number">01</span><span class="hljs-literal">-ai</span>/Yi<span class="hljs-literal">-6B-Chat</span>/resolve/main/model<span class="hljs-literal">-00003-of-00003</span>.safetensors?download=true<br>conda activate llamaConvert<br>python <span class="hljs-literal">-c</span> <span class="hljs-string">&quot;from huggingface_hub import snapshot_download; snapshot_download(repo_id=&#x27;01-ai/Yi-6B-Chat&#x27;, local_dir=r&#x27;D:\models\01yi&#x27;, ignore_patterns=[&#x27;*.h5&#x27;, &#x27;*.ot&#x27;, &#x27;*.msgpack&#x27;, &#x27;*.safetensors&#x27;])&quot;</span><br></code></pre></td></tr></table></figure><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><code class="hljs powershell">conda activate llamaConvert<br><span class="hljs-built_in">cd</span> D:\llama\llama.cpp<br>python convert.py D:\models\<span class="hljs-number">01</span>yi<br><span class="hljs-comment"># Wrote D:\models\01yi\ggml-model-f16.gguf</span><br>conda activate llamaCpp<br><span class="hljs-built_in">cd</span> D:\llama ; .\quantize.exe <span class="hljs-literal">--help</span><br>.\quantize.exe D:\models\<span class="hljs-number">01</span>yi\ggml<span class="hljs-literal">-model-f16</span>.gguf .\<span class="hljs-number">01</span>yi<span class="hljs-literal">-6b-Q4_K_M</span>.gguf Q4_K_M<br>.\server.exe <span class="hljs-literal">-m</span> .\<span class="hljs-number">01</span>yi<span class="hljs-literal">-6b-Q4_K_M</span>.gguf <span class="hljs-literal">-c</span> <span class="hljs-number">4096</span> <span class="hljs-literal">--n-gpu-layers</span> <span class="hljs-number">50</span><br></code></pre></td></tr></table></figure><h2 id="附加-百川2">附加 百川2</h2><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><code class="hljs powershell"><span class="hljs-built_in">cd</span> D:\models\baichuan<br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;pytorch_model.bin&#x27;</span> <span class="hljs-string">&quot;https://huggingface.co/baichuan-inc/Baichuan2-7B-Chat/resolve/main/pytorch_model.bin?download=true&quot;</span><br>conda activate llamaConvert<br>python <span class="hljs-literal">-c</span> <span class="hljs-string">&quot;from huggingface_hub import snapshot_download; 
snapshot_download(repo_id=&#x27;baichuan-inc/Baichuan2-7B-Chat&#x27;, local_dir=r&#x27;D:\models\baichuan&#x27;, ignore_patterns=[&#x27;*.h5&#x27;, &#x27;*.bin&#x27;, &#x27;*.ot&#x27;, &#x27;*.msgpack&#x27;, &#x27;*.safetensors&#x27;])&quot;</span><br><span class="hljs-built_in">cd</span> D:\llama\llama.cpp<br>python convert.py D:\models\baichuan<br><span class="hljs-comment"># Wrote D:\models\baichuan\ggml-model-f16.gguf</span><br>conda activate llamaCpp<br><span class="hljs-built_in">cd</span> D:\llama ; .\quantize.exe <span class="hljs-literal">--help</span><br>.\quantize.exe D:\models\baichuan\ggml<span class="hljs-literal">-model-f16</span>.gguf .\baichuan<span class="hljs-literal">-7b-Q3_K_M</span>.gguf Q3_K_M<br>.\server.exe <span class="hljs-literal">-m</span> .\baichuan<span class="hljs-literal">-7b-Q3_K_M</span>.gguf <span class="hljs-literal">-c</span> <span class="hljs-number">2048</span> <span class="hljs-literal">--n-gpu-layers</span> <span class="hljs-number">30</span><br></code></pre></td></tr></table></figure><h2 id="附加-tigerbot-13b">附加 tigerbot-13b</h2><p><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9UaWdlclJlc2VhcmNoL3RpZ2VyYm90LTEzYi1jaGF0LXY1" rel="noopener external nofollow noreferrer">tigerbot-13b</a> 在 <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL2plaW5sZWUxOTkxL2NoaW5lc2UtbGxtLWJlbmNobWFyaw==" rel="noopener external nofollow noreferrer">chinese-llm-benchmark</a> 上排名靠前。</p><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><code class="hljs powershell"><span class="hljs-built_in">cd</span> D:\models\tigerbot<br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;pytorch_model-00001-of-00003.bin&#x27;</span> <span class="hljs-literal">--max-download-limit</span>=<span class="hljs-number">6</span>M <span class="hljs-string">&quot;https://huggingface.co/TigerResearch/tigerbot-13b-chat-v5/resolve/main/pytorch_model-00001-of-00003.bin?download=true&quot;</span><br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;pytorch_model-00002-of-00003.bin&#x27;</span> <span class="hljs-literal">--max-download-limit</span>=<span class="hljs-number">6</span>M <span class="hljs-string">&quot;https://huggingface.co/TigerResearch/tigerbot-13b-chat-v5/resolve/main/pytorch_model-00002-of-00003.bin?download=true&quot;</span><br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;pytorch_model-00003-of-00003.bin&#x27;</span> <span class="hljs-literal">--max-download-limit</span>=<span class="hljs-number">6</span>M <span class="hljs-string">&quot;https://huggingface.co/TigerResearch/tigerbot-13b-chat-v5/resolve/main/pytorch_model-00003-of-00003.bin?download=true&quot;</span><br>conda activate llamaConvert<br>python 
<span class="hljs-literal">-c</span> <span class="hljs-string">&quot;from huggingface_hub import snapshot_download; snapshot_download(repo_id=&#x27;TigerResearch/tigerbot-13b-chat-v5&#x27;, local_dir=r&#x27;D:\models\tigerbot&#x27;, ignore_patterns=[&#x27;*.h5&#x27;, &#x27;*.bin&#x27;, &#x27;*.ot&#x27;, &#x27;*.msgpack&#x27;, &#x27;*.safetensors&#x27;])&quot;</span><br><span class="hljs-built_in">cd</span> D:\llama\llama.cpp<br>python convert.py D:\models\tigerbot <span class="hljs-literal">--padvocab</span><br><span class="hljs-built_in">cd</span> D:\llama ; .\quantize.exe <span class="hljs-literal">--help</span><br>.\quantize.exe D:\models\tigerbot\ggml<span class="hljs-literal">-model-f16</span>.gguf D:\models\tigerbot<span class="hljs-literal">-13B-Chat-Q4_K_M</span>.gguf Q4_K_M<br>.\server.exe <span class="hljs-literal">-m</span> D:\models\tigerbot<span class="hljs-literal">-13B-Chat-Q4_K_M</span>.gguf <span class="hljs-literal">-c</span> <span class="hljs-number">4096</span><br></code></pre></td></tr></table></figure><div class="note note-info"> <p>感觉 6G 显存下,比较好用的是 Yi-6B-Chat-Q4_K_M<br>tigerbot-13b 在 R5 5600H 上推理速度 4.6 tokens/s,CPU 使用率 60%,频率 3.5GHz,应该是内存带宽瓶颈</p> </div><h2 id="附加-在-Colab-上量化">附加 在 Colab 上量化</h2><ul><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9jb2xhYi5yZXNlYXJjaC5nb29nbGUuY29tL2RyaXZlLzFKVDNYRmpEN0NUUkI5N3B1M1FwZUd1eldBMXlZRUFNNz91c3A9c2hhcmluZw==" rel="noopener external nofollow noreferrer">llm2gguf.ipynb</a></li><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9MaW1vdXIvQ2F1c2FsTE0tMTRCLUdHVUY=" rel="noopener external nofollow noreferrer">量化后的结果</a></li></ul><h3 id="安装-llama-cpp">安装 llama.cpp</h3><figure class="highlight ipython"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs ipython">!git clone --depth=<span class="hljs-number">1</span> https://github.com/ggerganov/llama.cpp.git<br>%cd /content/llama.cpp<br>!LLAMA_CUDA=<span class="hljs-number">1</span> make -j<br></code></pre></td></tr></table></figure><h3 id="计算-imatrix">计算 imatrix</h3><figure class="highlight ipython"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><code class="hljs ipython">%cd /content<br>!wget -O transient.txt.gz https://huggingface.co/datasets/Limour/b-corpus/resolve/main/<span class="hljs-number">00</span>-preview/<span class="hljs-number">00</span>-transient.txt.gz?download=true<br>!gunzip transient.txt.gz<br>!mkdir -p /content/CausalLM-14B-GGUF<br>!wget -O /content/CausalLM-14B-GGUF/causallm_14b.Q8_0.gguf https://huggingface.co/TheBloke/CausalLM-14B-GGUF/resolve/main/causallm_14b.Q8_0.gguf?download=true<br>!/content/llama.cpp/imatrix -m /content/CausalLM-14B-GGUF/causallm_14b.Q8_0.gguf -f /content/transient.txt -ngl <span class="hljs-number">36</span><br></code></pre></td></tr></table></figure><h3 id="登录拥抱脸">登录拥抱脸</h3><figure class="highlight ipython"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><code class="hljs ipython"><span class="hljs-keyword">from</span> google.colab <span class="hljs-keyword">import</span> 
userdata<br><span class="hljs-keyword">from</span> huggingface_hub <span class="hljs-keyword">import</span> login<br><span class="hljs-comment"># login(token=os.environ.get(&quot;HF_TOKEN&quot;), write_permission=True)</span><br>login(token=userdata.get(<span class="hljs-string">&#x27;HF_TOKEN&#x27;</span>), write_permission=<span class="hljs-literal">True</span>)<br><span class="hljs-comment"># from huggingface_hub import notebook_login</span><br><span class="hljs-comment"># notebook_login()</span><br></code></pre></td></tr></table></figure><h3 id="跳过-转换模型">(跳过) 转换模型</h3><figure class="highlight ipython"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><code class="hljs ipython">%cd llama.cpp<br>!python -m pip install -r requirements.txt<br>!pip install tiktoken<br><span class="hljs-keyword">from</span> huggingface_hub <span class="hljs-keyword">import</span> snapshot_download<br>!mkdir -p ~/CausalLM<br>snapshot_download(repo_id=<span class="hljs-string">&#x27;CausalLM/7B&#x27;</span>, local_dir=<span class="hljs-string">r&#x27;/content/CausalLM&#x27;</span>, ignore_patterns=[<span class="hljs-string">&#x27;*.h5&#x27;</span>, <span class="hljs-string">&#x27;*.ot&#x27;</span>, <span class="hljs-string">&#x27;*.msgpack&#x27;</span>, <span class="hljs-string">&#x27;*.safetensors&#x27;</span>])<br>!python convert.py --vocab-<span class="hljs-built_in">type</span> bpe --pad-vocab --outtype f16 /content/CausalLM <br></code></pre></td></tr></table></figure><h3 id="量化模型">量化模型</h3><figure class="highlight ipython"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><code class="hljs ipython">!/content/llama.cpp/quantize --allow-requantize --imatrix /content/imatrix.dat /content/CausalLM-14B-GGUF/causallm_14b.Q8_0.gguf /content/CausalLM-14B-GGUF/causallm_14b.IQ3_XS.gguf IQ3_XS<br></code></pre></td></tr></table></figure><h3 id="上传模型">上传模型</h3><figure class="highlight ipython"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br></pre></td><td class="code"><pre><code class="hljs ipython"><span class="hljs-keyword">from</span> huggingface_hub <span class="hljs-keyword">import</span> HfApi<br>api = HfApi()<br>api.upload_file(<br> path_or_fileobj=<span class="hljs-string">&quot;/content/CausalLM-14B-GGUF/causallm_14b.IQ3_XS.gguf&quot;</span>,<br> path_in_repo=<span class="hljs-string">&quot;causallm_14b.IQ3_XS.gguf&quot;</span>,<br> repo_id=<span class="hljs-string">&quot;Limour/CausalLM-14B-GGUF&quot;</span><br>)<br></code></pre></td></tr></table></figure>]]></content:encoded>
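<!-- A hypothetical smoke test for the llama.cpp server started above (server.exe on port 8080): instead of the browser UI, query its HTTP completion endpoint from Python. The /completion route and the 'content' response field are assumptions based on the llama.cpp server of that vintage, not taken from the post.

import requests

resp = requests.post(
    'http://127.0.0.1:8080/completion',
    json={'prompt': '你好,请介绍一下你自己。', 'n_predict': 128, 'temperature': 0.7},
    timeout=120,
)
print(resp.json()['content'])  # the generated continuation returned by the server
-->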
<category domain="https://hexo.limour.top/tags/llama/">llama</category>
<comments>https://hexo.limour.top/Running-Qwen-on-the-Win10-platform-with-6GB-of-video-memory#disqus_thread</comments>
</item>
<item>
<title>【记录】轻量个人导航页面 Flare</title>
<link>https://hexo.limour.top/Lightweight-personal-navigation-page-Flare</link>
<guid>https://hexo.limour.top/Lightweight-personal-navigation-page-Flare</guid>
<pubDate>Sun, 31 Dec 2023 17:18:28 GMT</pubDate>
<description>&lt;p&gt;&lt;a href=&quot;https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL3NvdWx0ZWFyeS9kb2NrZXItZmxhcmU=&quot; rel=&quot;noopener external nofollow noreferrer</description>
  162. <content:encoded><![CDATA[<p><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL3NvdWx0ZWFyeS9kb2NrZXItZmxhcmU=" rel="noopener external nofollow noreferrer">Flare</a> 是一款轻量、快速、美观的个人导航页面,适用于 HomeLab 或其他注重私密的场景。</p><ul><li><a href="/Docker-bu-shu-Nginx-Proxy-Manager">反向代理服务</a></li><li>访问 <code>https://flare.limour.top/editor</code> 进行书签编辑</li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">mkdir</span> -p ~/app/flare &amp;&amp; <span class="hljs-built_in">cd</span> ~/app/flare &amp;&amp; nano docker-compose.yml<br>sudo docker-compose up -d <span class="hljs-comment"># flare:5005</span><br>sudo docker-compose logs <span class="hljs-comment"># 获取登录密码</span><br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">version:</span> <span class="hljs-string">&#x27;3.6&#x27;</span><br> <br><span class="hljs-attr">services:</span><br> <span class="hljs-attr">flare:</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">soulteary/flare</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">always</span><br> <span class="hljs-comment"># 默认无需添加任何参数,如有特殊需求</span><br> <span class="hljs-comment"># 可阅读文档 https://github.com/soulteary/docker-flare/blob/main/docs/advanced-startup.md</span><br> <span class="hljs-comment"># 启用账号登录模式</span><br> <span class="hljs-attr">command:</span> <span class="hljs-string">flare</span> <span class="hljs-string">--disable_login=0</span><br> <span class="hljs-attr">environment:</span><br> <span class="hljs-comment"># 如需开启用户登录模式,需要先设置 `nologin` 启动参数为 `0`</span><br> <span class="hljs-comment"># 如开启 `nologin`,未设置 FLARE_USER,则默认用户为 `flare`</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">FLARE_USER=LimourFlare</span><br> <span class="hljs-comment"># 指定你自己的账号密码,默认生成的密码强度堪忧</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">FLARE_PASS=your_password</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">FLARE_OFFLINE=1</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">FLARE_MINI_REQUEST=1</span><br> <span class="hljs-attr">volumes:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">./app:/app</span><br> <br><span class="hljs-attr">networks:</span><br> <span class="hljs-attr">default:</span><br> <span class="hljs-attr">external:</span> <span class="hljs-literal">true</span><br> <span class="hljs-attr">name:</span> <span 
class="hljs-string">ngpm</span><br></code></pre></td></tr></table></figure>]]></content:encoded>
<category domain="https://hexo.limour.top/tags/docker/">docker</category>
<category domain="https://hexo.limour.top/tags/ngpm/">ngpm</category>
<category domain="https://hexo.limour.top/tags/homepage/">homepage</category>
<comments>https://hexo.limour.top/Lightweight-personal-navigation-page-Flare#disqus_thread</comments>
</item>
<item>
<title>【记录】Win10平台使用MLC-LLM编译Qwen-1.8B-Chat</title>
<link>https://hexo.limour.top/Compile-Qwen-1.8B-Chat-using-MLC-LLM-on-Win</link>
<guid>https://hexo.limour.top/Compile-Qwen-1.8B-Chat-using-MLC-LLM-on-Win</guid>
<pubDate>Sat, 09 Dec 2023 04:24:07 GMT</pubDate>
<description>&lt;p&gt;&lt;a href=&quot;https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL21sYy1haS9tbGMtbGxt&quot; rel=&quot;noopener external nofollow noreferrer&quot;&gt;MLC-LLM&lt;/a</description>
  174. <content:encoded><![CDATA[<p><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL21sYy1haS9tbGMtbGxt" rel="noopener external nofollow noreferrer">MLC-LLM</a> 是一种大模型高性能通用部署解决方案,可以通过预编译加速使用本机API原生部署任何大型语言模型。该项目的使命是利用ML编译技术,使每个人都能在其设备上本地开发、优化和部署AI模型。<br><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9Rd2VuL1F3ZW4tMV84Qg==" rel="noopener external nofollow noreferrer">Qwen-1.8B</a> 是阿里云研发的通义千问大模型系列的18亿参数规模的模型。在Qwen-1.8B的基础上,使用对齐机制打造了基于大语言模型的AI助手 <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9odWdnaW5nZmFjZS5jby9Rd2VuL1F3ZW4tMV84Qi1DaGF0" rel="noopener external nofollow noreferrer">Qwen-1.8B-Chat</a>。</p><h2 id="配置环境">配置环境</h2><ul><li><a href="/-ji-lu--an-zhuang-conda-bing-geng-huan-qing-hua-yuan">安装conda</a></li><li><a href="/Use-Tunnel-to-speed-up-the-connection-of-VPS">Tun模式</a>(管理员权限)</li><li><a href="https://hexo.limour.top/go/#aHR0cHM6Ly9sbG0ubWxjLmFpL2RvY3MvaW5zdGFsbC90dm0uaHRtbCNpbnN0YWxsLXR2bS11bml0eQ==" rel="noopener external nofollow noreferrer">详细流程</a></li></ul><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br></pre></td><td class="code"><pre><code class="hljs powershell">conda create <span class="hljs-literal">-n</span> mlc_llm python numpy pytorch transformers scipy timm git <span class="hljs-literal">-c</span> pytorch <span class="hljs-literal">-c</span> conda<span class="hljs-literal">-forge</span><br>conda activate mlc_llm<br>python <span class="hljs-literal">-m</span> pip install <span class="hljs-literal">--pre</span> <span class="hljs-literal">-U</span> <span class="hljs-operator">-f</span> https://mlc.ai/wheels mlc<span class="hljs-literal">-ai-nightly</span><br>python <span class="hljs-literal">-c</span> <span class="hljs-string">&quot;import tvm; print(&#x27;\n&#x27;.join(f&#x27;&#123;k&#125;: &#123;v&#125;&#x27; for k, v in tvm.support.libinfo().items()))&quot;</span><br>python <span class="hljs-literal">-c</span> <span class="hljs-string">&quot;import tvm; print(tvm.vulkan().exist)&quot;</span><br><span class="hljs-built_in">cd</span> D:\mlc<span class="hljs-literal">-llm</span><br>git clone <span class="hljs-literal">--depth</span>=<span class="hljs-number">1</span> <span class="hljs-literal">-b</span> main <span class="hljs-literal">--single-branch</span> https://github.com/mlc<span class="hljs-literal">-ai</span>/mlc<span class="hljs-literal">-llm</span>.git<br><span class="hljs-built_in">cd</span> .\mlc<span class="hljs-literal">-llm</span>\<br>git submodule sync<br>git submodule update <span class="hljs-literal">--init</span> <span class="hljs-literal">--recursive</span> <span class="hljs-literal">--depth</span>=<span class="hljs-number">1</span><br>pip install .<br>python <span class="hljs-literal">-m</span> mlc_llm.build <span class="hljs-literal">--help</span><br></code></pre></td></tr></table></figure><h2 id="准备模型">准备模型</h2><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><code class="hljs powershell">python <span class="hljs-literal">-c</span> <span 
class="hljs-string">&quot;from huggingface_hub import snapshot_download; snapshot_download(repo_id=&#x27;Qwen/Qwen-1_8B-Chat&#x27;, local_dir=&#x27;D:\mlc-llm\qwen&#x27;, ignore_patterns=[&#x27;*.h5&#x27;, &#x27;*.ot&#x27;, &#x27;*.msgpack&#x27;, &#x27;*.safetensors&#x27;])&quot;</span><br><span class="hljs-built_in">cd</span> D:\mlc<span class="hljs-literal">-llm</span>\qwen<br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;model-00001-of-00002.safetensors&#x27;</span> <span class="hljs-string">&quot;https://huggingface.co/Qwen/Qwen-1_8B-Chat/resolve/main/model-00001-of-00002.safetensors?download=true&quot;</span><br>D:\aria2\aria2c.exe <span class="hljs-literal">--all-proxy</span>=<span class="hljs-string">&#x27;http://127.0.0.1:7890&#x27;</span> <span class="hljs-literal">-o</span> <span class="hljs-string">&#x27;model-00002-of-00002.safetensors&#x27;</span> <span class="hljs-string">&quot;https://huggingface.co/Qwen/Qwen-1_8B-Chat/resolve/main/model-00002-of-00002.safetensors?download=true&quot;</span><br></code></pre></td></tr></table></figure><h2 id="编译模型">编译模型</h2><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs powershell"><span class="hljs-built_in">cd</span> D:\mlc<span class="hljs-literal">-llm</span>\dist<br>python <span class="hljs-literal">-m</span> mlc_llm.build <span class="hljs-literal">--model</span> <span class="hljs-string">&quot;D:\mlc-llm\qwen&quot;</span> <span class="hljs-literal">--target</span> vulkan <span class="hljs-literal">--quantization</span> q0f16 <span class="hljs-literal">--use-safetensors</span><br></code></pre></td></tr></table></figure><ul><li>等待模型支持:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL21sYy1haS9tbGMtbGxtL2lzc3Vlcy8xMzcz" rel="noopener external nofollow noreferrer">Model type qwen not supported</a></li></ul>]]></content:encoded>
  175. <category domain="https://hexo.limour.top/tags/llama/">llama</category>
  176. <comments>https://hexo.limour.top/Compile-Qwen-1.8B-Chat-using-MLC-LLM-on-Win#disqus_thread</comments>
  177. </item>
  178. <item>
  179. <title>【探索】外科打结法中的等价操作</title>
  180. <link>https://hexo.limour.top/Equivalent-operations-in-surgical-knot-tying</link>
  181. <guid>https://hexo.limour.top/Equivalent-operations-in-surgical-knot-tying</guid>
  182. <pubDate>Sat, 02 Dec 2023 06:47:05 GMT</pubDate>
  183. <description>&lt;p&gt;手术中的止血和缝合,均需要进行结扎,而结扎是否牢固,又与打结有密切关系,结一定要打得牢固,不能松动、滑脱。&lt;br&gt;
  184. 常用的结扣是方结,结扎后极为牢固,在手术中最常用。而打方结时,手法顺序错误就容易打成假结或滑结。因此这里将探讨基础打结手法的等价性,帮助快速理解不同手法所成结</description>
  185. <content:encoded><![CDATA[<p>手术中的止血和缝合,均需要进行结扎,而结扎是否牢固,又与打结有密切关系,结一定要打得牢固,不能松动、滑脱。<br>常用的结扣是方结,结扎后极为牢固,在手术中最常用。而打方结时,手法顺序错误就容易打成假结或滑结。因此这里将探讨基础打结手法的等价性,帮助快速理解不同手法所成结的本质。<br>除不易混淆的外科结外,无论是单手打结还是持钳打结,均由基础动作组合而成,基础动作所成的结都对应纽结理论中的三叶结。三叶结有两种,它们互成镜像,彼此不相同痕,分别称为左手三叶结和右手三叶结。因此无论用哪种手法,最后一定能对应到两种三叶结上。</p><p><img src="https://img.limour.top/2023/12/02/656ad3ee2a544.webp" alt="两种三叶结"></p><p><img src="https://img.limour.top/2023/12/02/656ad41001b48.webp" alt="右手勾法对应右手三叶结"></p><p><img src="https://img.limour.top/2023/12/02/656ad4260ec27.webp" alt="左手勾法对应左手三叶结"></p><p><img src="https://img.limour.top/2023/12/02/656ad43dec1e3.webp" alt="右手掏法对应左手三叶结"></p><p><img src="https://img.limour.top/2023/12/02/656ad43fd2842.webp" alt="左手掏法对应右手三叶结"></p><p><img src="https://img.limour.top/2023/12/02/656ad44069922.webp" alt="镊右手定则法对应右手三叶结"></p><p><img src="https://img.limour.top/2023/12/02/656ad43e8f792.webp" alt="镊左手定则法对应左手三叶结"></p><p>因此,右手勾法、左手掏法、镊右手定则法三者等价;左手勾法、右手掏法、镊左手定则法三者等价。任意组合两种基础打结动作打出<strong>不同</strong>的两种三叶结即可组成一个正确的方结。</p>]]></content:encoded>
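<p>补充一个组合对照的小例子(按上文的对应关系推演,仅作示意;其中假结的对应以常见外科教科书的描述为准):</p><ul><li>右手勾法 + 左手勾法:右手三叶结 + 左手三叶结,手性相反,得到方结;</li><li>右手勾法 + 左手掏法:两个右手三叶结,手性相同,得到的不是方结(一般即教科书中的假结);</li><li>镊右手定则法 + 镊左手定则法:右手三叶结 + 左手三叶结,手性相反,同样得到方结。</li></ul>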
  186. <category domain="https://hexo.limour.top/tags/%E6%8E%A2%E7%B4%A2/">探索</category>
  187. <comments>https://hexo.limour.top/Equivalent-operations-in-surgical-knot-tying#disqus_thread</comments>
  188. </item>
  189. <item>
  190. <title>【翻译】多重免疫分析揭示了血清免疫蛋白质组学在预测胃癌术前化疗反应中的作用</title>
  191. <link>https://hexo.limour.top/Multiplex-immune-profiling-reveals-the-role-of-serum-immune-proteomics-in-predicting-response-to-preoperative-chemotherapy-of-gastric-cancer</link>
  192. <guid>https://hexo.limour.top/Multiplex-immune-profiling-reveals-the-role-of-serum-immune-proteomics-in-predicting-response-to-preoperative-chemotherapy-of-gastric-cancer</guid>
  193. <pubDate>Fri, 01 Dec 2023 15:41:27 GMT</pubDate>
  194. <description>&lt;div class=&quot;note note-info&quot;&gt;
  195. &lt;p&gt;原文链接:&lt;a href=&quot;https://hexo.limour.top/go/#aHR0cHM6Ly9kb2kub3JnLzEwLjEwMTYvai54Y3JtLjIwMjMuMTAwOT</description>
  196. <content:encoded><![CDATA[<div class="note note-info"> <p>原文链接:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9kb2kub3JnLzEwLjEwMTYvai54Y3JtLjIwMjMuMTAwOTMx" rel="noopener external nofollow noreferrer">Multiplex immune profiling reveals the role of serum immune proteomics in predicting response to preoperative chemotherapy of gastric cancer</a></p> </div><h2 id="摘要">摘要</h2><p>对于胃腺癌患者,对术前化疗的反应存在异质性。该领域现有的研究主要集中在肿瘤微环境(TME)上,而关于全身免疫与化疗反应之间的关系知之甚少。在这项研究中,我们收集了胃腺癌患者在术前、术中和术后接受术前化疗前后的血清样本,并使用基于抗体的蛋白质组学面板研究其免疫蛋白质组学。我们还收集了手术切除的肿瘤样本,并采用多种方法评估它们的肿瘤微环境。我们发现局部和全身免疫特征均与治疗反应相关。术前化疗引发了复杂的全身免疫反应,表现为动态的血清免疫蛋白质组学。建立了一个用于预测反应的术前血清蛋白评分系统。总的来说,这些发现突显了全身免疫在胃癌治疗中的基本但在很大程度上被低估的作用,建议使用基于术前血清免疫蛋白质组学的患者分层策略。</p><h2 id="导言">导言</h2><p>胃癌,其中胃腺癌(GAC)是其主要组织学类型,是全球最常见的恶性肿瘤之一,也是导致癌症相关死亡的主要原因之一。相当一部分胃癌患者在晚期被诊断,这在很大程度上限制了治疗的有效性和患者的预后。尽管手术切除仍然是治疗的强制性支柱,包括JCOG9501和JCOG9502(日本临床肿瘤研究组的系列研究)在内的几项研究表明,胃癌患者不会从扩大切除中受益。在过去的十年中,新辅助和围手术期治疗带来了新的希望。MAGIC试验表明,对于可切除的II/III期胃癌患者,行三个术前和三个术后周期的ECF(表阿霉素、顺铂和5-氟尿嘧啶)化疗,相较于仅手术,可以将5年生存率从23%提高到36%(MAGIC: the Medical Research Council Adjuvant Gastric Infusional Chemotherapy)。FLOT4-AIO试验进一步显示,与ECF或ECX(表阿霉素、5-氟尿嘧啶和卡培他滨)相比,FLOT(5-氟尿嘧啶、叶酸、奥沙利铂和多西紫杉醇)方案可导致更好的病理反应率、R0切除率和总生存(OS)。人们认识到,术前用化疗治疗可以增加根治切除的机会,消除早期微观扩散,并允许对辅助治疗进行术前反应评估。随着免疫检查点抑制剂(ICIs)等新药物的出现,化疗仍然是胃癌围手术期治疗中最基本且可获得的组成部分。<br>另一方面,在胃癌中,术前治疗仍然存在争议,尤其是在东亚国家。对术前化疗的反应存在异质性,而对其机制的了解有限。需要预测患者对术前化疗反应的生物标志物,以对患者进行最佳治疗分层。新出现的证据表明,免疫参与了患者对化疗的反应。Choi等人报道称,肿瘤标本中基质程序性细胞死亡配体1(PD-L1)的表达可以预测第II/III期胃癌经D2胃切除术后辅助化疗的益处。 Kim等人在标准一线化疗期间使用配对的术前和治疗期间的胃活检样本,发现化疗诱导了自然杀伤细胞(NK)的浸润,巨噬细胞的极化,以及在治疗反应者中抗原呈递的增加。但是,在胃癌免疫学领域的现有研究主要集中在肿瘤微环境(TME)中的局部免疫反应上,关于全身免疫与胃癌化疗反应之间的关系知之甚少。<br>胃癌是一种全身性疾病。肿瘤负担和抗肿瘤治疗刺激的免疫反应在不同组织之间协调进行。对接受术前化疗的患者进行系统免疫景观或由Hiam-Galvez等人描述的免疫宏环境的分析对于全面了解癌症免疫和治疗抵抗机制至关重要。现有的系统免疫-炎症指标,如中性粒细胞与淋巴细胞比值(NLR),主要依赖于血细胞计数,这限制了它们的维度。血清免疫蛋白质组学,具有高含量,将是对全身性免疫的理想反映。在这项研究中,我们收集了胃腺癌患者在术前、术中和术后接受术前化疗的血清样本,并使用基于抗体的蛋白质组学平台(Olink Target 96 Inflammation panel)研究了他们的免疫蛋白质组学。我们还从这些患者中收集了手术切除的肿瘤样本,并结合多重免疫荧光(mIF)、免疫组织化学(IHC)和RNA测序(RNA-seq)来评估肿瘤微环境。研究了血清免疫蛋白质组学的动态变化及其与肿瘤微环境的相关性。鉴定了预测接受术前化疗患者肿瘤缩小、总生存(OS)和无进展生存(PFS)的生物标志物。</p><h2 id="结果">结果</h2><h3 id="研究人群">研究人群</h3><p>本研究纳入了90名接受术前化疗并随后接受胃切除手术的胃腺癌患者(图1A)。在术前期间接受免疫检查点抑制剂(ICIs)的患者被排除在外。符合条件的患者被分为响应者(残余肿瘤/肿瘤床≤50%的化疗效果,Becker TRG评分1–2)和非响应者(Becker TRG评分3)。在90名患者中,有36人(40%)达到了肿瘤缩小评分1–2,被视为响应者。肿瘤缩小程度较好的患者与非响应者相比,总生存显著更长(图S1A)。无进展生存显示了类似的趋势,尽管没有统计学差异(图S1B)。患者的基本临床特征总结在表S1中。近半数的患者接受了两药细胞毒性化疗,其中大多数是SOX(S-1加奥沙利铂)或XELOX(卡培他滨加奥沙利铂)方案。其余的患者接受了三药细胞毒性化疗,主要是DOS(多西紫杉醇、奥沙利铂和S-1)方案。截至2022年3月1日的分析日期,中位随访时间为55.8个月(范围从3.2到82.7个月)。在整体人群中,中位无进展生存为39.8个月(95%置信区间[CI],32.7至未达到[NR]),而中位总生存为63.9个月(95% CI,51.8至74.1),有45例死亡(50%)。</p><h3 id="血清免疫蛋白质组学动态与术前化疗反应相关">血清免疫蛋白质组学动态与术前化疗反应相关</h3><p>从接受术前化疗的患者中收集了37份术前、8份术中和83份术后的血清样本,其中30份术前和30份术后的血清样本是成对的(图1A)。使用Olink Target 96 Inflammation panel的近距离扩展测定法(PEA)测量了关键免疫和炎症通路中92个标记蛋白的水平。比较术前和术后血清样本中蛋白质水平显示了术前化疗后血清免疫蛋白质组学的动态变化。92个蛋白中有18个在成对和非成对测试中均显示出显著变化(图1B、图S1C和图S1D),表明术前化疗引发了复杂的全身免疫反应。其中,血清C-X-C基序化学因子配体1(CXCL1)和CXCL5水平在术前化疗后显著下降(图S1D)。有趣的是,Zhou等人报道称,作为CXCR2配体的CXCL1和CXCL5可以显著促进胃癌细胞的迁移,并推动胃癌的转移。化疗通过降低CXCL5和CXCL1的血清水平可能有助于预防胃癌的转移。事实上,CXCL1/5水平在术前化疗的早期周期中下降(图S1E)。<br>我们进一步比较了不同治疗反应患者的血清免疫蛋白质组学动态变化。我们发现,响应者在治疗后表现出更动态的血清免疫蛋白质组学变化(图1C和1D)。我们还比较了化疗后响应者和非响应者蛋白水平的绝对变化,发现在响应者中,免疫蛋白水平在化疗后整体上更大幅度的变化(图1E)。例如,与响应者相比,非响应者治疗后血清CXCL5水平的降低程度要轻得多(图1C–1F)。在治疗期间的蛋白质组学在响应者和非响应者中也似乎存在差异(图S1E)。例如,在响应者中,治疗期间血清白介素受体亚单位b(IL-10RB)和IL-18水平在化疗过程中呈上升趋势,而在非响应者中未呈现这种趋势(图S1F和图S1G),尽管这一部分的结论可能受到样本数量的限制。<br>综合而言,这些结果表明在胃腺癌患者中对术前化疗存在复杂的全身性免疫反应。响应者在术前化疗后往往表现出更为动态的全身性免疫反应。</p><h3 
id="肿瘤微环境(TME)与患者对术前化疗的反应相关">肿瘤微环境(TME)与患者对术前化疗的反应相关</h3><p>首先,我们比较了来自不同治疗反应患者的肿瘤样本的转录组,以获得有关肿瘤局部特征的一般知识。基因集富集分析(GSEA)显示了良好反应者中改变的标志性通路(图2A)。如DNA复制和细胞周期等通路的改变,可能表明抑制癌细胞增殖和肿瘤退化。除此之外,近一半的通路与免疫有关,如趋化因子信号通路和细胞因子与细胞因子受体相互作用通路(图2B和图2C),表明免疫在化疗中的重要性。<br>因此,我们通过多重免疫荧光(mIF)在手术切除的肿瘤样本中评估了地理免疫景观。我们使用CD4、CD8和Foxp3染色来识别不同类型的T细胞。我们使用CD68和CD163染色来识别巨噬细胞(图2D)。我们比较了响应者和非响应者之间的免疫浸润。CD68+巨噬细胞和CD68+/CD163+ M2巨噬细胞的细胞密度在非响应者中显著更高(图2E和图S2A)。相应地,Xing等人报道称,在胃癌新辅助化疗后,非响应者中CD68+巨噬细胞浸润更高。M2巨噬细胞也被证明参与了多种癌症的化疗耐药。<br>与此同时,我们从队列中收集了24份术前内镜活检样本。我们使用mIF对术前TME进行了分析(图S2B)。值得注意的是,大多数内镜活检只获取了胃的表浅黏膜,这在很大程度上限制了它们对整个肿瘤的代表性和与手术切除组织的可比性(图S2C)。事实上,mIF显示术前TME中的免疫细胞浸润在响应者和非响应者之间没有差异(修订后的图S2D),这可能是由于活检深度有限和胃癌内肿瘤的显著异质性。<br>总体而言,这些结果表明术后TME与对术前化疗的反应相关。</p><h3 id="血清免疫蛋白质组学与TME之间的相关性">血清免疫蛋白质组学与TME之间的相关性</h3><p>鉴于大多数现有的癌症免疫学研究集中在肿瘤微环境(TME)上,我们评估了全身免疫与TME之间的相关性。我们还确定了血清免疫蛋白质组学与TME中免疫细胞浸润之间的相关性。有趣的是,术后TME似乎与术前而非术后血清免疫蛋白质组学更相关。即使在样本数量较少的情况下,术前血清免疫蛋白质组学与免疫细胞浸润之间的相关性总体上更强(图3A和图3B)。例如,更高的术前血清纤维母细胞生长因子21(FGF21)水平与CD68+巨噬细胞的浸润较少呈相关,而更高的术前血清转化生长因子b1(TGF-b1)水平与CD4+T细胞的浸润较多呈相关(图3C和图3D)。事实上,据报道TGF-b在调节效应器和调节性CD4阳性细胞反应方面具有多效性。 术后血清免疫蛋白质组学与术后免疫细胞浸润之间的相关性也被观察到。例如,更高的术前血清C-C基序化学因子配体11(CCL11)水平与CD4+/FOXP3+ T细胞的浸润较多呈相关(图3E)。王等人报道CCL11增加了乳腺癌中CD4+CD25+Foxp3+调节性T细胞(Tregs)的比例。需要进一步的研究来探讨CCL11是否在胃癌中调节CD4+Foxp3+Treg细胞功能。<br>我们还评估了术后血清蛋白水平与92个免疫基因的肿瘤mRNA水平之间的相关性。其中有5个免疫基因的相关性具有统计学意义,仅有两个是正相关的,符合预期(图3F和图S3A–S3E)。TNFSF12和CCL4的相关性实际上是边缘的(图S3A和图S3B)。血清蛋白水平与组织基因mRNA水平之间的相关性总体上较弱。<br>这些结果显示了全身性免疫与肿瘤微环境之间的相互通信和相互依赖关系。对肿瘤微环境的研究无法充分揭示免疫系统如何全面应对胃癌和抗肿瘤治疗。应该投入更多的努力来对患有胃癌的患者进行系统性免疫分析。</p><h3 id="经典全身性免疫炎症指标的临床价值">经典全身性免疫炎症指标的临床价值</h3><p>经典全身性免疫炎症指标大多基于血细胞比率,并已证明与患者的临床结局相关。 我们对血清免疫蛋白质组学与经典全身性免疫炎症指标之间的关系感到好奇。因此,我们评估了术后血清免疫蛋白质组学与经典免疫炎症指标之间的相关性,包括中性粒细胞与淋巴细胞比值(NLR)、血小板与淋巴细胞比值(PLR)、单核细胞与淋巴细胞比值(MLR)以及血小板分布宽度(PDW)以及常见血细胞计数。尽管大多数相关性相对较弱(图S3F),但血清CXCL5和CXCL1水平与血小板计数呈强相关(图S3G和图S3H)。由于CXCL1和CXCL5通常参与中性粒细胞的稳态和功能,需要更多的工作来理解这种意外但有趣的相关性。我们还评估了经典全身性免疫炎症指标与TME特征之间的关系。术后经典免疫炎症指标与TME中的免疫细胞浸润之间没有观察到相关性(图S3I)。<br>我们进一步探讨了经典全身性免疫炎症指标的临床价值,并评估了中性粒细胞与淋巴细胞比值(NLR)、血小板与淋巴细胞比值(PLR)、单核细胞与淋巴细胞比值(MLR)和血小板分布宽度(PDW)的治疗响应预测价值。我们绘制了这四个指标的受试者工作特征(ROC)曲线,最高的曲线下面积(AUC)为0.602(图S3J)。比例风险回归显示了这四个指标的预后价值。在单变量Cox回归中,这些指标对于OS或PFS均未显示出显著的预后价值,而在多变量Cox回归中,较高的NLR与较短的OS相关,危险比为1.172(95% CI,1.0066–1.3639)(图S3K和图S3L)。相应地,先前的报告已经显示NLR是胃食管交界和胃腺癌的负面预后因子。总体而言,这四个指标的预后价值有限。</p><h3 id="术后肿瘤基质PD-L1水平和术前血清PD-L1水平均可预测术前化疗反应">术后肿瘤基质PD-L1水平和术前血清PD-L1水平均可预测术前化疗反应</h3><p>PD-L1是关键的免疫调控分子。与其受体PD-1相互作用时,PD-L1抑制细胞毒性T细胞的免疫反应,从而参与肿瘤免疫逃逸。Choi等人基于CLASSIC试验队列报告称,基质PD-L1水平可以预测在第II/III期胃癌D2胃切除术后的辅助化疗效果。利用基于PD1/PDL1免疫组织化学染色的类似评分系统,我们发现非响应者在手术切除的肿瘤样本中基质PD-L1染色分数较高的趋势(图4A和图4B)。基质PD-1染色显示了类似的趋势,尽管这在统计学上并不显著(图S4A和图S4B)。然而,肿瘤区域的PD-L1染色与治疗反应没有相关性(图4A)。这些结果表明肿瘤中的基质PD-L1水平可以预测术前化疗的反应,并表明PD-1/PD-L1途径可能在胃癌的化疗抵抗中起到作用。<br>然而,由于其延迟性,术后基质PD-L1的反应预测价值可能会受到较大的限制。理想的预测因子应该是术前的。术前内镜活检的基质PD-L1染色无法预测治疗反应(图S4C和图S4D)。因此,我们进一步评估了术前血清PD-L1水平的临床意义。有趣的是,术前血清PD-L1水平在不同治疗反应的患者中显示出差异(图4C)。在治疗前,响应者的血清PD-L1水平较低,而治疗似乎减弱了这种差异,因为在术后样本中未观察到显著差异(图4E)。利用ROC曲线评估术前和术后血清PD-L1水平的治疗响应预测价值。术前血清PD-L1水平的AUC为0.737(95% CI,0.569–0.904),而术后血清PD-L1水平的AUC约为0.5(图4D和图4F),表明术前血清PD-L1水平是术前化疗的有希望的治疗响应预测因子。术前血清PD-L1水平较高(&gt;5.084归一化蛋白表达[NPX])的患者倾向于对术前化疗显示较差的治疗反应(图S4E)。<br>我们还评估了不同治疗反应患者的治疗期血清PD-L1水平。在响应者中,血清PD-L1在治疗过程中似乎有所增加。响应者的治疗期血清PD-L1水平显著较高(图S4F和图S4G)。这种差异的一个潜在原因可能是肿瘤细胞的破坏。需要更多样本和进一步研究来确认这一发现并揭示潜在机制。进一步测量了PD-L1/PD-1水平与血清PD-L1水平之间的病理学相关性。在不同的配对中,术前血清PD-L1水平和术后基质PD-1水平显示出最强的相关性(图S4H)。术前血清PD-L1水平可能与化疗后肿瘤中PD-1+免疫细胞的浸润有关。<br>总体而言,这些结果表明,术后肿瘤基质PD-L1水平和术前血清PD-L1水平均可以预测术前化疗的反应,而术前血清PD-L1水平应具有更大的临床意义。</p><h3 
id="术前血清CCL20水平预测术前化疗的反应">术前血清CCL20水平预测术前化疗的反应</h3><p>受PD-L1的发现启发,我们进一步比较了不同治疗反应患者的术前血清免疫蛋白质组学,结果显示10种蛋白质具有p &lt;0.05的差异。其中,术前CCL20水平显示出最显著的差异。值得注意的是,我们还比较了不同治疗反应患者的术后血清免疫蛋白质组学,与术前样本相比,差异要弱得多(图S5A)。<br>近期的研究已经确立了CCL20在不同癌症中作为化疗抵抗的重要介质。正如图S5B所总结的,Chen等人报告称,化疗通过核因子kB(NF-kB)和CCL20之间的正反馈环路诱导CCL20,并通过上调乳腺癌中的ATP结合盒亚家族B成员1(ABCB1)表达介导化疗抵抗。Wang等人报告称,化疗通过FOXO1/CEBPB/NF-kB信号途径在结直肠癌细胞中上调CCL20,而分泌的CCL20招募调节性T细胞,促进化疗抵抗。Liu等人报告称,顺铂刺激的经典活化巨噬细胞(CAMs)通过增加CCL20的产生促进卵巢癌细胞迁移。总体而言,现有的研究表明,CCL20的上调是由化疗引起的,并且增加的CCL20产生促进了化疗抵抗。<br>然而,我们的研究发现上述模型在胃癌中可能不成立。我们发现,在术前化疗的响应者中,治疗开始前血清CCL20水平显著较低(图5B)。术前血清CCL20水平预测治疗反应,AUC为0.769(95% CI,0.614–0.925)(图5C),表明胃癌患者在治疗前的血清CCL20水平存在差异。与现有的研究结果一致,非响应者的肿瘤中CCL20 mRNA水平上调(图5D)。然而,治疗后血清CCL20水平在响应者和非响应者之间没有差异,表明血清和肿瘤CCL20水平脱钩(图5E)。有趣的是,参考沈等人报道的可切除胃癌的血清和组织蛋白质组学,我们发现胃癌患者的血清CCL20水平相对于健康人有所升高(图5F)。肿瘤样本中CCL20蛋白水平也较正常胃组织高(图S5C)。然而,通过胃切除手术切除肿瘤并没有恢复血清CCL20水平,而是进一步增加了血清CCL20水平(图5F)。这些结果表明,血清CCL20并不是肿瘤CCL20的系统反映,而是系统免疫对胃癌和化疗的重要组成部分。<br>我们还验证了现有研究提出的CCL20上调的信号模型。Kim等人收集了在接受第一线标准化疗但未接受PD-1阻断的治疗前和治疗过程中胃活检样本的治疗前患者。我们分析了他们的转录组数据,并发现化疗并没有增加肿瘤样本中CCL20 mRNA水平。相反,化疗后CCL20 mRNA水平下降(图5G)。这一发现挑战了CCL20在胃癌中是由化疗引起的假设。与此同时,ABCB1、CEBPB和FOXO1 mRNA水平在不同反应的肿瘤之间(图5H)以及在化疗前后活检样本之间(图S5D)也没有差异。相反,更高的术前血清CCL20水平与肿瘤中CD4+T细胞的浸润较少相关(图S5E)。CD4+T细胞介导免疫应答,在实现对肿瘤的调节和有效免疫应答中至关重要。与此同时,更高的术前血清CCL20水平与更多基质中PD-1+或PD-L1+细胞的浸润相关(图5I和图S5F),这应该是肿瘤免疫逃逸的关键介质。总体而言,这些结果表明,血清CCL20诱导了一个针对化疗的系统免疫抑制环境。</p><p>正如图5J所总结的,现有的研究提出,肿瘤中CCL20的上调是由化疗引起的,而增加的CCL20产生促进了化疗抵抗。然而,我们发现在化疗开始前患者的血清CCL20水平存在差异。术前血清CCL20水平较高的患者倾向于具有较差的治疗反应。潜在机制是血清CCL20诱导了一个系统性的免疫抑制环境。这些发现提示,在术前血清CCL20水平较高的患者中,免疫治疗可能与化疗的结合更为有效。已经投入了大量努力来开发CCR6-CCL20轴(CCR6是CCL20的细胞受体)的抑制剂。通过抗体或拮抗剂干扰CCR6-CCL20轴在癌症治疗中显示出潜力。术前血清CCL20水平可能有助于选择那些有望从CCR6-CCL20抑制剂中受益的患者。此外,这些发现表明术前期是通过血清蛋白标志物进行患者分层的一个不可替代的时间窗口。因此,我们决定进一步建立一个用于预测术前化疗反应的术前血清蛋白组合。</p><h3 id="一个用于预测术前化疗反应的术前血清蛋白评分系统">一个用于预测术前化疗反应的术前血清蛋白评分系统</h3><p>通过比较不同治疗反应患者的术前血清蛋白水平(图5A),我们将15个p&lt;0.1的蛋白包括在一致性聚类中。基于一致性累积分布函数(CDF)图、增量面积图以及对一致性矩阵的手动检查,我们发现了四个术前血清亚型(图6A、6B和图S6A–S6H)。其中,cluster 2与患者的明显更好的治疗反应相关(图6C)。这种无审查的聚类还与患者的临床特征相关,如肿瘤的Lauren分类。Cluster 1和4与更高比例的腺癌肿瘤类型相关(图6D)。<br>考虑到临床实用性,我们进一步使用最小绝对值收缩和选择算子(LASSO)模型建立了一个用于预测术前化疗反应的术前血清响应预测分数(PSRscore)(图S6I和S6J)。简而言之,LASSO回归是一种使用收缩进行变量选择或参数消除的线性回归类型。通过适当的l值,PSRscore的公式限制为四个蛋白质的血清水平:CCL3、IL-15Ra、CXCL5和CCL20(图6F和图S6K)。PSRscore的ROC曲线,AUC为0.907(95% CI,0.814–1.000),确定了截断值为-0.843(图6E)。患者被分为PSRscore高组和低组(图6F)。低PSRscore与明显较差的治疗反应相关(图6G)。此外,PSRscore低的患者在术后肿瘤中数值上具有更多PD1+/PD-L1+细胞的基质浸润和更高的肿瘤PD-L1染色(图6H和图S6L),这通常导致对抗PD-1/PDL1疗法的适应症。<br>除了CCL20外,PSRscore还包括CCL3、IL-15Ra和CXCL5的术前血清水平。较高的血清CCL3和IL-15Ra水平以及较低的CXCL5水平与较差的治疗反应相关(图S6K)。研究表明,CCL3参与了不同癌症中的免疫逃逸和化疗抵抗。高水平的CCL3与Tregs、肿瘤相关巨噬细胞(TAMs)和髓系源性抑制细胞(MDSCs)的肿瘤内浸润增加相关。CCL3驱动的TAMs招募已被认为是转移性巢穴的驱动事件。已经开发了CCL3的中和抗体和抑制剂,并在抗癌治疗中显示出潜力。 目前对IL-15Ra和CXCL5在化疗抵抗中的作用了解有限,需要更多研究来探索它们在胃癌中的功能。<br>PSRscore评分系统有助于分层胃腺癌患者,并筛选出那些可能不能仅通过术前化疗获益的患者。对于这组患者,我们的工作强烈暗示患者可能从免疫治疗的组合中受益,如免疫检查点抑制剂(ICIs)或CCL3/20中和抗体/抑制剂(图6I)。可以设计前瞻性试验来验证这一策略,并需要建立一个验证队列来验证此评分系统的灵敏性和特异性。</p><h3 id="TME和血清免疫蛋白组学的预后价值">TME和血清免疫蛋白组学的预后价值</h3><p>我们进一步评估了TME和血清免疫蛋白组学的预后价值。在多变量Cox回归中包括了在单变量Cox回归中具有预测价值的所有基本临床特征以及年龄和性别(表S2和S3)。显示为OS或PFS预测因子的免疫细胞与其风险比一起列在森林图中(图S7A和S7B)。绘制了代表性生存预测因子的Kaplan-Meier曲线(图S7C–S7F)。没有免疫细胞类型是OS的独立预测因子,而CD68+巨噬细胞的浸润通过log rank测试、单变量Cox回归和多变量Cox回归证实,预测PFS缩短(图S7C)。虽然不是独立的,CD68+巨噬细胞的浸润也通过log rank测试显示为OS的负面预后因子(图S7D)。<br>显示为OS或PFS预测因子的术前和术后血清蛋白也在森林图中列出,与其风险比一起(图7A、7B、S7G和S7H)。绘制了代表性生存预测因子的Kaplan-Meier曲线(图7C、7D、S7I和S7J)。其中,高术后血清IL-10RB水平与显著缩短的OS和PFS均相关,通过log 
rank测试、单变量Cox回归和多变量Cox回归证实(图7C和7D)。这表明术后血清IL-10RB水平是接受术前化疗的患者的强烈负面生存预测因子。值得注意的是,术后IL-10RB水平在术前化疗后显著升高,表明其可能参与术前化疗的反应(图S1D)。关于IL-10信号在胃癌中的作用的研究还有限。需要更多的工作来了解IL-10RB在胃癌术前治疗中的作用。</p><h2 id="讨论">讨论</h2><p>在过去的十年里,人们致力于揭示免疫在癌症中的作用。免疫疗法在胃癌治疗中取得了突破,免疫检查点抑制剂成为晚期胃或食管腺癌的一线治疗方法。然而,在胃癌围手术期治疗中,目前没有治疗方法成功挑战了化疗的主导地位。免疫被认为在患者受益于围手术期化疗中起着关键作用。现有研究重点关注肿瘤微环境中局部免疫反应,而对胃癌免疫的改善理解必须特别评估全身性免疫。我们使用血清免疫蛋白组学和经典全身性免疫炎症指标来描述全身免疫,并研究其与肿瘤微环境以及治疗反应的关联。我们发现围手术期治疗诱导了复杂的全身性免疫反应,这表现为动态的免疫蛋白组学。同时,对治疗反应更好的患者在治疗后显示出更具动态性的血清免疫蛋白组学变化。肿瘤微环境也显示与围手术期化疗的反应有关。然而,在治疗开始之前预测潜在的治疗反应将更加实际。令人兴奋的是,我们发现PD-L1和CCL20的术前血清水平是围手术期化疗反应的预测因子,与它们在免疫抑制中的已知作用一致。进一步建立了一个术前血清蛋白质组学面板用于预测反应,能够精确地筛选出可能不会单独对围手术期化疗产生反应的患者。对于这部分患者,我们相信他们将从免疫疗法和化疗的联合治疗中受益。同时,IL-10RB的术后血清水平也被确认为胃癌患者预后的强大预测因子。<br>肿瘤内PD-L1在免疫抑制和化疗抵抗中的作用已经得到确认。然而,关于可溶性PD-L1的研究有限。我们的研究发现,在化疗开始之前,患者的血清PD-L1水平存在差异。对化疗产生反应的患者往往具有较低的血清PD-L1水平。需要进一步研究可溶性PD-L1在化疗抵抗中是否发挥作用。在CCL20中也发现了类似的发现,这是一种已知参与各种癌症化疗抵抗的趋化因子。我们的研究表明,在其他癌症类型中提出的CCL20诱导的化疗抵抗模型在胃癌中可能不成立。将CCL20的变化视为化疗的结果,剥夺了临床医生在治疗前对患者进行分层和干预的主动性。相反,我们的发现显示,在化疗开始之前,对化疗产生不同反应的患者在血清免疫蛋白组学上存在差异,这提前了患者分层和干预的时间窗口。在PD-L1和CCL20的启发下,我们开发了一个用于预测围手术期化疗反应的术前血清蛋白质组学面板,称为PSRscore。通过计算四种免疫蛋白的术前血清蛋白水平,患者可以被分为两组。PSRscore低的患者往往具有较差的治疗反应,并可能从免疫疗法的联合治疗中获益。这种评分系统在患者分层方面具有很大的临床应用潜力。值得注意的是,PSRscore的建立基于一个接受铂类化疗的亚洲队列。这些免疫标志物在接受紫杉醇为基础的方案的非亚洲患者中的表现需要进一步验证。<br>我们相信血清蛋白标志物在胃癌患者术前分层中具有特殊的临床意义。几乎所有现有的胃癌分子分类都依赖于手术或内镜切除的肿瘤组织。以TCGA分类为最著名的例子,微卫星不稳定(MSI)型患者被证明更容易从免疫疗法中受益,而基因组稳定(GS)型患者对化疗反应较差。然而,这些分子分类在临床实践中很少使用。一个重要原因是大多数分子分类依赖于复杂的分子技术,如qPCR、原位杂交,甚至是组学技术,这在大多数临床中是不可获得的。此外,在胃癌中,术前获取肿瘤样本依赖于胃镜活检。胃癌存在显著的肿瘤内异质性,且活检深度有限,这在很大程度上影响了活检样本的代表性。因此,在胃切除术之前确定胃癌的分子分类一直非常困难。相比之下,血清蛋白质组学涵盖了系统和肿瘤局部特征,因此具有灵敏性和信息性。在临床中,可以轻松获取血清样本,对患者造成的损害有限。像前列腺特异性抗原(PSA)或甲胎蛋白(AFP)这样的血清蛋白标志物已经几十年用于癌症的诊断和随访。各种医院都广泛提供用于测量血清蛋白的设备和培训人员。这些因素赋予了胃癌血清蛋白质组学研究在临床上巨大的意义。未来应建立胃癌的血清蛋白分类,以指导胃癌的围手术期治疗。</p><h2 id="局限性">局限性</h2><p>研究存在一些需要注意的局限性。首先,治疗期间血清样本的数量相对较小,这限制了得出某些结论的统计能力。其次,多重免疫荧光(mIF)只测量了肿瘤微环境(TME)中的关键免疫细胞。单细胞测序可以更好地描绘TME。第三,本研究的一些结论和建议应在接受术前化疗的患者的前瞻性队列甚至随机对照试验中进行进一步验证。在解释数据时应考虑这些局限性。<br>总的来说,我们对胃癌患者的全身免疫系统和肿瘤微环境进行了描述,并展示了它们与术前化疗反应的关联。我们鉴定了用于预测治疗反应和预后的血清生物标志物。这项工作强调了全身免疫在胃癌术前化疗中的基本但很大程度上被低估的作用,支持了一种基于术前血清免疫蛋白质组学的患者分层策略,并突显了在未来研究中全面描绘免疫的重要性。</p>]]></content:encoded>
  197. <category domain="https://hexo.limour.top/tags/%E7%BF%BB%E8%AF%91/">翻译</category>
  198. <category domain="https://hexo.limour.top/tags/%E9%A2%84%E5%90%8E%E6%A8%A1%E5%9E%8B/">预后模型</category>
  199. <comments>https://hexo.limour.top/Multiplex-immune-profiling-reveals-the-role-of-serum-immune-proteomics-in-predicting-response-to-preoperative-chemotherapy-of-gastric-cancer#disqus_thread</comments>
  200. </item>
  201. <item>
  202. <title>【转载】圈中人</title>
  203. <link>https://hexo.limour.top/repost-in-circle</link>
  204. <guid>https://hexo.limour.top/repost-in-circle</guid>
  205. <pubDate>Thu, 16 Nov 2023 06:59:47 GMT</pubDate>
  206. <description>&lt;blockquote&gt;
  207. &lt;p&gt;《&lt;a href=&quot;https://hexo.limour.top/go/#aHR0cHM6Ly93ZWIuYXJjaGl2ZS5vcmcvd2ViLzIwMjMwOTA5MTcwMDQxL2h0dHBzOi8vYmxvZy5jeHBsYXkub3</description>
  208. <content:encoded><![CDATA[<blockquote><p>《<a href="https://hexo.limour.top/go/#aHR0cHM6Ly93ZWIuYXJjaGl2ZS5vcmcvd2ViLzIwMjMwOTA5MTcwMDQxL2h0dHBzOi8vYmxvZy5jeHBsYXkub3JnL3dvcmtzL2luLWNpcmNsZS8=" rel="noopener external nofollow noreferrer">圈中人</a>》 from <a href="https://hexo.limour.top/go/#aHR0cHM6Ly9ibG9nLmN4cGxheS5vcmcv" rel="noopener external nofollow noreferrer">CXPLAY World</a></p></blockquote><h2 id="序">序</h2><p>外面很危险, 于是有人在地上画了一圈并对我说: “这个圈外很危险, 你不要随便出去, 我会帮你对付这些危险, 所以我很忙, 但我也会派人监督你.”;</p><p>监督人来了, 他对我说: &quot;你知道要怎么做了吧?但为了防止你总是不小心碰到边界, 我会给你的粗心大意一些小小的惩罚, 好让你长记性, 毕竟我要监督的人可不止你一个. &quot;, 于是监督人在圈内画了一个更小的圈.</p><p>最后, 我发现睡觉的时候翻个身也总是不小心超出监督人画的圈, 于是我自己给自己画了一个圈, 好让我只能站在原地不得动弹就再也不能睡觉, 也就不会出现无意识地超出圈外了;</p><p>但我好像忘记了我本来是可以出去的, 但是由于缺少保护自己的经验, 我也渐渐不敢出去了. 因为相比之下, 监督人的惩罚显得比圈外的危险来得更具体更具有危险性, 我也更有经验去应付.</p><h2 id="正文">正文</h2><p>这里的人常说, 如今这个地方变成这个样子完全是由于那座灯塔.</p><p>在那个我还未曾触及到的时代, 这里的格局并不是这个样子, 这里和外面的世界都是一样的一马平川无所遮拦. 没有如今形同棋盘一样的森严布局, 更没有那座灯塔. 但是不知道什么时候, 似乎是从巨墙开始, 有一部分人开始为这个地方建起了障碍.</p><p>起初只是一道篱笆, 一个土沟, 再后来不约而同地全都变成了深红色质地的石块, 这种材料从来没有听说过是从什么地方开采出来, 但就现在的状态来看, 这墙就好像是从地底自己长出来一般: 整齐划一, 密不透风, 坚不可摧. 不过现在也有人觉察到, 曾经光滑细密的红墙上出现了裂缝, 但这裂缝实质上与这墙并没有多大关系, 因为裂缝时而出现时而消失, 没有人知道红墙是如何做到这种程度的自愈的, 就好像它是活的一样.</p><p>再说起那个灯塔, 它的材质和红墙并不一样, 但据说两者建立起来的时间都是相同的. 灯塔没有密不透风的样子, 它的表面就像积木拼接一般有很明显的缝隙, 甚至有的地方还有空缺, 不过这些缺陷也和墙上的缝隙一样时而消失时而出现. 但就我认知之中看来, 人人都说这座灯塔是这里的每个人亲手筑起的, 却没有人知道这墙的成因, 可能是因为人人心里都明白但都心照不宣, 也可能是因为它本来就是自己从这块大地上 “生长” 出来的, 因为它的材质从今天看始终都不像是这个大陆上应该存在的东西.</p><hr><p>那片生长在红墙上的蓝色平原, 听说最开始的时候和墙的颜色一样, 是红色的, 后来平原的住民们逐渐发现所有的植物开始变成了蓝色, 土壤也变成了深蓝色. 由此原来单调的墙上多出来一片蓝色, 也吸引了更多的斗篷们过来在平原上定居. 一位从红色时期就开始在这里居住的斗篷曾经和我说, 目前来说这里变成蓝色和红色的时候并没有什么差别, 但其他斗篷们却都不约而同的住进了这里, 问起这些新来者理由却一个二个都含糊其词甚至回答不上来到底是为什么来到这里, 听得最多的一个理由就是: “因为大家都来这里了.” 事实真是这样的话, 那这个平原上早就已经人满为患了, 有相当部分人踏足这里之后就离去了, 还有部分人暂住过一段时间后也悄然离去, 他们留下的痕迹会很快地消失, 绵密的蓝色植被会再次长满被斗篷践踏的区域, 平原变得就好像从来没有人来过的一样.</p><p>当我问起这位斗篷他自己来这里的理由的时候, 它也沉默了, 但没有很久, 它问我: “你喜欢听故事吗?”, 我说: “是真相吗?”, “我也… 不知道.” 他回答我.</p><p>我接着说: “好吧, 虽然我不是来听故事的, 但如果是你愿意说给我听的故事, 我也愿闻其详.”</p><p>斗篷没有办法呈现出背后人的表情, 所以我在这面具上也办法没找到什么破绽, 它拿起了篝火边的一根细木柴在灰烬边画了起来.</p><p>它画了一个圆圈: “我们现在的位置应该是这里, 应该是.” 它在圆圈边上加了一个小圆圈, 指着它说.</p><p>“但如果我再把里面的「棋盘」画出来.” 它在大圆中画了几个小圆.</p><p>“如果我没猜错, 你我都是这棋盘里面的人, 你应该会明白.” 它接着在大圆和几个小圆之间用另外一个内圆隔开了, 出现了一个同心圆套一堆小圆圈的图画. 最后它在同心圆里面套了更多的圆, 为了套下更多的圆还把几个小圆擦去了, 几个小圆最后被数个大圆套在里面, 整个画面就好像一个靶子一样.</p><p>它放下了木柴, 对我说 “曾经我也是这样一个个小圆中的人, 总以为外面有什么好东西, 想要出去看看.”</p><p>“后来, 我真的出去了, 这个小圆不再是束缚, 但我发现外面的世界有更多的圆圈.” 它指着那些同心圆一路向外, 最后到了代表我们位置的那个大圆边缘上的小圆.</p><p>“最后我费尽千辛万苦, 终于从这些圆里出来了, 到了这里, 现在被叫做「蓝色平原」的地方.” 它又拿起细木柴握在手心.</p><p>“还是红色的那个时候, 我心里就想, 这里也许就是这世界的最后一个圆了吧?”</p><p>“说实话, 现在我们这个圆外面的景色从来就没变过, 我最后本来打算继续往外去亲眼看看那些五彩斑斓的风景的.”</p><p>它沉默下来, 用木柴继续在最外围的圆上加了一个更大的圆, 不过这次画布的面积不太够用了, 只画了一部分.</p><p>它指着新画的那个圆对我说: “有一天我在平原上看到一个蓝色的斗篷, 我一如既往地和它打着招呼, 但是它说着一口不是我们的语言, 它好像也听不懂我在说什么.”</p><p>“蓝色的斗篷好像也明白了我们两人语言不通, 就在脚下开始用手指在雪地上画画.”</p><p>“一个圈, 两个圈, 三个圈. 我已经记不清他当时画了多少个圈了, 最后同心圆的外围上也多出了一个代表着当时位置的小圆.”</p><p>“看起来就和现在这幅画几乎一模一样, 我现在都不知道它当时到底是在往圈外走还是进到一个新的圈里. 「蓝色平原」这个小圈又在什么位置呢?” 它又放下了木柴.</p><p>“蓝斗篷画完之后我明白了它的意思, 它好像要往我曾经来过的路上走, 我也没有阻拦它.”</p><p>“因为我知道, 它以为它正在向圈外走, 就和我当时一样, 也许是以为圈外有什么好东西.”</p><p>我说: “那你觉得你有找到什么 ‘好东西’ 吗?”</p><p>它说: “没什么好东西, 但比起最里面令人窒息的小圈, 这里确实呼吸以及行动上更加自由, 不过…”</p><p>“不过?” 我接他的话道.</p><p>“圈里来的人总是以为这里就是真正的圈外, 喜欢在这些相对自由的地方胡作非为, 大声喧哗. 不过最后它们留下的痕迹总是会被自然而然地抹除, 也无所谓了.” 它的语气变得轻松了起来.</p><p>我问: “你觉得这些 ‘圈’, 或者这些墙是从什么地方来的? 我从里面出来的时候好像没有遇到很多的这些 ‘圈’ ?”</p><p>它说: “我不知道, 我也和后来的人谈过几次这类话题, 他们的回答和你一样, 他们从里面 ‘出来’ 的时候遇到的墙确实和我曾经遇到的数量或者规模上都有很大差别.”</p><p>最后我和它又随便聊了一些有关墙边逐渐侵入的黑藤的事情, 不过由于它很久没有出过这个平原, 所以连黑藤是什么它都不清楚, 它也表示出一种漠不关心的态度, 所以我也没有继续问下去. 
离开的时候, 它叮嘱我注意一下平原上的人, 因为它察觉到最近平原上的人越来越稀少了, 如果有什么发现, 希望能够和它分享一下.</p><hr><p>我把这类驻守在「蓝色平原」或者墙上任何地方上的人成为 “守墙人”, 他们虽然身处大多数的圈之外, 但却一般不愿意接触新的事物, 极端的守墙人甚至不愿意和斗篷乃至其他守墙人交流, 还好我遇到的是一个尚且能和其他人交流的斗篷, 不然这个平原上的很多事情我也无从得知.</p><p>之于这个 “圈” 的问题, 在「棋盘」中的时候我就曾经发现, 大部分的小圈中人都不关心圈外的事情, 甚至都不知道有圈存在, 更不用说去突破这圈了, 和这类永远不会走出圈的人打交道总是要先自己去适应它自己的圈, 否则他们也会和守墙人一样把你拒之 “圈” 外.</p><p>但相对的越往外, 这些 “圈” 的意味逐渐变得模糊不清, 没有人知道是谁建起了它, 也从来没有人宣布过自己有关于它的事迹.</p>]]></content:encoded>
  209. <category domain="https://hexo.limour.top/tags/%E8%BD%AC%E8%BD%BD/">转载</category>
  210. <comments>https://hexo.limour.top/repost-in-circle#disqus_thread</comments>
  211. </item>
  212. <item>
  213. <title>【探索】基于WebSocket的内网穿透工具</title>
  214. <link>https://hexo.limour.top/WebSocket-based-intranet-penetration-tool</link>
  215. <guid>https://hexo.limour.top/WebSocket-based-intranet-penetration-tool</guid>
  216. <pubDate>Thu, 09 Nov 2023 11:38:50 GMT</pubDate>
  217. <description>国内的服务器备案麻烦,所以很多内网服务需要使用内网穿透工具。之前尝试使用QUIC来伪装,但不稳定。现在找到了一个特征少的内网穿透工具ProxyNT,可以透过NAT和防火墙将本地服务器暴露到公网上。使用Docker部署服务端和客户端,配置相应的参数后即可使用。</description>
  218. <content:encoded><![CDATA[<p>国内的服务器除了挂个备案,不想再要了。而许多内网的服务需要在外网访问,内网穿透是必不可少的。但是用国外的服务器的话,需要过一层未知的东西,难免被误伤,融入汪洋大海也是必须的。之前折腾了一下通过套一层<a href="/Protocol-for-intranet-penetration-based-on-QUIC">QUIC</a>来伪装,不知道为什么,总是不稳定。寻寻觅觅,又找到一个特征少的内网穿透工具:<a href="https://hexo.limour.top/go/#aHR0cHM6Ly9naXRodWIuY29tL3NhemltYS9wcm94eW50" rel="noopener external nofollow noreferrer">ProxyNT</a> 。ProxyNT是一个用python编写的基于WebSocket的反向代理服务器,可以透过NAT和防火墙将本地服务器暴露到公网上,从原理看,套上一层CDN保护公网ip也是可以的。</p><h2 id="服务端">服务端</h2><ul><li><a href="/Docker-bu-shu-Nginx-Proxy-Manager">反向代理服务</a></li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">mkdir</span> -p ~/app/proxynt &amp;&amp; <span class="hljs-built_in">cd</span> ~/app/proxynt &amp;&amp; nano Dockerfile &amp;&amp; nano docker-compose.yml<br>docker build -t limour/proxynt .<br>nano config.json<br>sudo docker-compose up -d<br></code></pre></td></tr></table></figure><figure class="highlight dockerfile"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><code class="hljs Dockerfile"><span class="hljs-keyword">FROM</span> python:<span class="hljs-number">3.9</span>-alpine<br><span class="hljs-keyword">RUN</span><span class="language-bash"> pip install -U python-snappy</span><br><span class="hljs-keyword">RUN</span><span class="language-bash"> pip install -U https://github.com/sazima/proxynt/archive/refs/heads/snappy.zip</span><br><span class="hljs-keyword">ENTRYPOINT</span><span class="language-bash"> [<span class="hljs-string">&quot;nt_server&quot;</span>, <span class="hljs-string">&quot;-c&quot;</span>, <span class="hljs-string">&quot;/opt/config.json&quot;</span>]</span><br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">version:</span> <span class="hljs-string">&#x27;3.3&#x27;</span><br><span class="hljs-attr">services:</span><br> <span class="hljs-attr">proxynt:</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">unless-stopped</span><br> <span class="hljs-attr">volumes:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;./config.json:/opt/config.json&#x27;</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;/etc/localtime:/etc/localtime:ro&#x27;</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">limour/proxynt</span><br> <br><span class="hljs-attr">networks:</span><br> <span class="hljs-attr">default:</span><br> <span class="hljs-attr">external:</span> <span class="hljs-literal">true</span><br> <span class="hljs-attr">name:</span> <span class="hljs-string">ngpm</span><br></code></pre></td></tr></table></figure><figure class="highlight json"><table><tr><td 
class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br></pre></td><td class="code"><pre><code class="hljs json"><span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;port&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-number">18888</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;log_file&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;/dev/null&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;path&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;/websocket_path&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;password&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;helloworld&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;admin&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;enable&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-literal"><span class="hljs-keyword">true</span></span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;admin_password&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;new_password&quot;</span><br> <span class="hljs-punctuation">&#125;</span><br><span class="hljs-punctuation">&#125;</span><br></code></pre></td></tr></table></figure><p><img src="https://img.limour.top/2023/11/09/654cc58f6ea33.webp" alt="反代 proxynt:18888"></p><h2 id="客户端">客户端</h2><ul><li><a href="/-fu-ke-GitHub-wen-jian-jia-su">GitHub 文件加速</a></li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">mkdir</span> -p ~/app/proxynt &amp;&amp; <span class="hljs-built_in">cd</span> ~/app/proxynt<br><span class="hljs-comment"># pip install --upgrade pip -i https://pypi.tuna.tsinghua.edu.cn/simple</span><br><span class="hljs-comment"># pip install --use-pep517 python-snappy -i https://pypi.tuna.tsinghua.edu.cn/simple</span><br>pip install -U python-snappy -i https://pypi.tuna.tsinghua.edu.cn/simple<br>pip install -U https://xxx.limour.top/token/https://github.com/sazima/proxynt/archive/refs/heads/snappy.zip<br>whereis nt_client<br></code></pre></td></tr></table></figure><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><code class="hljs bash">nano config.json<br>nt_client -c config.json <span class="hljs-comment"># 测试</span><br>nano proxynt.sh &amp;&amp; <span class="hljs-built_in">chmod</span> +x proxynt.sh<br>nano proxynt.service<br>sudo <span class="hljs-built_in">mv</span> proxynt.service 
/etc/systemd/system/proxynt.service<br>sudo systemctl <span class="hljs-built_in">enable</span> proxynt<br>sudo systemctl start proxynt<br>sudo systemctl status proxynt<br></code></pre></td></tr></table></figure><figure class="highlight json"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><code class="hljs json"><span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;server&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-punctuation">&#123;</span><br> <span class="hljs-attr">&quot;url&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;wss://limour.top:443/websocket_path&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;password&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;helloworld&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;compress&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-literal"><span class="hljs-keyword">true</span></span><br> <span class="hljs-punctuation">&#125;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;client_name&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;home_pc&quot;</span><span class="hljs-punctuation">,</span><br> <span class="hljs-attr">&quot;log_file&quot;</span><span class="hljs-punctuation">:</span> <span class="hljs-string">&quot;/home/limour/app/proxynt/nt.log&quot;</span><br><span class="hljs-punctuation">&#125;</span><br></code></pre></td></tr></table></figure><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-meta">#!/bin/sh</span><br><span class="hljs-built_in">export</span> PYTHONPATH=/home/limour/.local/lib/python3.10/site-packages<br>/home/limour/.local/bin/nt_client -c /home/limour/app/proxynt/config.json<br></code></pre></td></tr></table></figure><figure class="highlight ini"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br></pre></td><td class="code"><pre><code class="hljs ini"><span class="hljs-section">[Unit]</span><br><span class="hljs-attr">Description</span>=proxynt<br><span class="hljs-attr">After</span>=network.target<br><span class="hljs-section">[Service]</span><br><span class="hljs-attr">ExecStart</span>=/home/limour/app/proxynt/proxynt.sh<br><span class="hljs-attr">ExecReload</span>=/bin/kill -HUP <span class="hljs-variable">$MAINPID</span><br><span class="hljs-attr">Restart</span>=<span class="hljs-literal">on</span>-failure<br><span class="hljs-section">[Install]</span><br><span class="hljs-attr">WantedBy</span>=multi-user.target<br></code></pre></td></tr></table></figure><ul><li>访问 <code>https://limour.top:443/websocket_path/admin</code></li><li>看到客户端上线后,新建配置即可</li></ul><h2 id="附加-WebSSH">附加 
WebSSH</h2><p>和上面的内网穿透配合,连接时host填<code>proxynt</code>,可以保证内网ssh不暴露公网的同时,又能通过公网进行ssh连接。</p><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">mkdir</span> -p ~/app/webssh &amp;&amp; <span class="hljs-built_in">cd</span> ~/app/webssh &amp;&amp; nano docker-compose.yml<br>sudo docker-compose up -d<br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">version:</span> <span class="hljs-string">&#x27;3.3&#x27;</span><br><span class="hljs-attr">services:</span><br> <span class="hljs-attr">webssh:</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">unless-stopped</span><br> <span class="hljs-attr">environment:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">GIN_MODE=release</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">savePass=true</span><br> <span class="hljs-attr">volumes:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;/etc/localtime:/etc/localtime:ro&#x27;</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">jrohy/webssh:latest</span><br> <br><span class="hljs-attr">networks:</span><br> <span class="hljs-attr">default:</span><br> <span class="hljs-attr">external:</span> <span class="hljs-literal">true</span><br> <span class="hljs-attr">name:</span> <span class="hljs-string">ngpm</span><br></code></pre></td></tr></table></figure><p><img src="https://img.limour.top/2023/11/10/654d918353361.webp" alt="反代 webssh:5032"></p>]]></content:encoded>
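<p>部署完成后可以先做一个最小的连通性检查(以下命令仅作示意,域名与路径按自己反代后的实际配置替换,这里假设与上文一致):用 curl 向反代后的 WebSocket 路径发送一次升级握手,只要返回的不是反代层自己的 404/502,就说明请求已经正确转发到了 proxynt 服务端。</p><figure class="highlight bash"><pre><code class="hljs bash"># 通过反向代理测试 WebSocket 升级握手(需走 HTTP/1.1)
curl --http1.1 -i -N \
  -H "Connection: Upgrade" \
  -H "Upgrade: websocket" \
  -H "Sec-WebSocket-Version: 13" \
  -H "Sec-WebSocket-Key: $(openssl rand -base64 16)" \
  https://limour.top/websocket_path
# 正常情况下应看到 101 Switching Protocols;之后再启动 nt_client 验证完整链路
</code></pre></figure>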
  219. <category domain="https://hexo.limour.top/tags/%E6%8E%A2%E7%B4%A2/">探索</category>
  220. <category domain="https://hexo.limour.top/tags/docker/">docker</category>
  221. <category domain="https://hexo.limour.top/tags/ngpm/">ngpm</category>
  222. <category domain="https://hexo.limour.top/tags/%E5%86%85%E7%BD%91%E7%A9%BF%E9%80%8F/">内网穿透</category>
  223. <category domain="https://hexo.limour.top/tags/ssh/">ssh</category>
  224. <category domain="https://hexo.limour.top/tags/ws/">ws</category>
  225. <comments>https://hexo.limour.top/WebSocket-based-intranet-penetration-tool#disqus_thread</comments>
  226. </item>
  227. <item>
  228. <title>【记录】自建去广告的DoH服务器</title>
  229. <link>https://hexo.limour.top/Self-built-ad-blocking-DoH-server</link>
  230. <guid>https://hexo.limour.top/Self-built-ad-blocking-DoH-server</guid>
  231. <pubDate>Sat, 28 Oct 2023 12:56:54 GMT</pubDate>
  232. <description>&lt;h2 id=&quot;进行部署&quot;&gt;进行部署&lt;/h2&gt;
  233. &lt;ul&gt;
  234. &lt;li&gt;&lt;a href=&quot;/Docker-bu-shu-Nginx-Proxy-Manager&quot;&gt;反代服务&lt;/a&gt;&lt;/li&gt;
  235. &lt;/ul&gt;
  236. &lt;figure class=&quot;highlight bash&quot;&gt;&lt;table&gt;&lt;tr&gt;</description>
  237. <content:encoded><![CDATA[<h2 id="进行部署">进行部署</h2><ul><li><a href="/Docker-bu-shu-Nginx-Proxy-Manager">反代服务</a></li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">mkdir</span> -p ~/app/adguard &amp;&amp; <span class="hljs-built_in">cd</span> ~/app/adguard &amp;&amp; nano docker-compose.yml<br>sudo docker-compose up -d <span class="hljs-comment"># 面板端口 3000</span><br><span class="hljs-comment"># /opt/adguardhome/letsencrypt/live/npm-1/fullchain.pem</span><br><span class="hljs-comment"># /opt/adguardhome/letsencrypt/live/npm-1/privkey.pem</span><br>sed -i <span class="hljs-string">&#x27;s/allow_unencrypted_doh: false/allow_unencrypted_doh: true/&#x27;</span> ./conf/AdGuardHome.yaml &amp;&amp; sudo docker-compose restart<br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">version:</span> <span class="hljs-string">&#x27;3.3&#x27;</span><br><span class="hljs-attr">services:</span><br> <span class="hljs-attr">adguard:</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">unless-stopped</span><br> <span class="hljs-attr">volumes:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;./work:/opt/adguardhome/work&#x27;</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;./conf:/opt/adguardhome/conf&#x27;</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;/root/base/NGPM/letsencrypt:/opt/adguardhome/letsencrypt&#x27;</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;/etc/localtime:/etc/localtime:ro&#x27;</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">adguard/adguardhome:latest</span><br> <br><span class="hljs-attr">networks:</span><br> <span class="hljs-attr">default:</span><br> <span class="hljs-attr">external:</span> <span class="hljs-literal">true</span><br> <span class="hljs-attr">name:</span> <span class="hljs-string">ngpm</span><br></code></pre></td></tr></table></figure><p><img src="https://img.limour.top/2023/10/28/653d065c29ba1.webp" alt=""></p><h2 id="DNS设置">DNS设置</h2><ul><li>导航栏-设置-DNS设置</li><li>DNS 服务配置中启用DNSSEC</li></ul><p><img src="https://img.limour.top/2023/10/28/653d06f4936f9.webp" alt=""><br><img src="https://img.limour.top/2023/10/28/653d07482ee29.webp" alt=""></p><h2 id="DoH设置">DoH设置</h2><ul><li>导航栏-设置-DNS设置</li><li>加密中启用加密</li><li>证书可以设置为npm自动申请的证书</li><li>反代 <code>/dns-query</code>, <code>token</code>保密不要泄露</li><li><code>token</code>后面没有<code>/</code>, <code>dns-query</code>后面有<code>/</code></li><li>在chrome的设置-隐私和安全-安全-DNS中填入<code>https://my.com/token</code></li><li>回到仪表盘,看看有没有记录到DNS查询</li></ul><p><img src="https://img.limour.top/2023/10/28/653d07f74eedb.webp" 
alt=""></p>]]></content:encoded>
  238. <category domain="https://hexo.limour.top/tags/docker/">docker</category>
  239. <category domain="https://hexo.limour.top/tags/ngpm/">ngpm</category>
  240. <category domain="https://hexo.limour.top/tags/doh/">DoH</category>
  241. <comments>https://hexo.limour.top/Self-built-ad-blocking-DoH-server#disqus_thread</comments>
  242. </item>
  243. <item>
  244. <title>【探索】基于QUIC的内网穿透协议</title>
  245. <link>https://hexo.limour.top/Protocol-for-intranet-penetration-based-on-QUIC</link>
  246. <guid>https://hexo.limour.top/Protocol-for-intranet-penetration-based-on-QUIC</guid>
  247. <pubDate>Fri, 27 Oct 2023 12:46:07 GMT</pubDate>
  248. <description>&lt;h2 id=&quot;环境和依赖&quot;&gt;环境和依赖&lt;/h2&gt;
  249. &lt;ul&gt;
  250. &lt;li&gt;&lt;a href=&quot;/-ji-lu--an-zhuang-npsfrp-fu-wu-duan-yu-ke-hu-duan&quot;&gt;内网穿透服务&lt;/a&gt;&lt;/li&gt;
  251. &lt;li&gt;&lt;a href=&quot;/Docker-bu-shu-</description>
  252. <content:encoded><![CDATA[<h2 id="环境和依赖">环境和依赖</h2><ul><li><a href="/-ji-lu--an-zhuang-npsfrp-fu-wu-duan-yu-ke-hu-duan">内网穿透服务</a></li><li><a href="/Docker-bu-shu-Nginx-Proxy-Manager">证书自动申请服务</a></li></ul><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">mkdir</span> -p ~/base/NPS &amp;&amp; <span class="hljs-built_in">cd</span> ~/base/NPS &amp;&amp; <span class="hljs-built_in">mkdir</span> conf<br>nano docker-compose.yml<br>nano conf/nps.conf<br><span class="hljs-built_in">touch</span> conf/&#123;clients,hosts,tasks&#125;.json<br>sudo docker-compose up -d<br><span class="hljs-comment"># 反代 dashboard 8080</span><br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">version:</span> <span class="hljs-string">&#x27;3.3&#x27;</span><br><span class="hljs-attr">services:</span><br> <span class="hljs-attr">nps:</span><br> <span class="hljs-attr">container_name:</span> <span class="hljs-string">nps</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">unless-stopped</span><br> <span class="hljs-attr">ports:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;8025:8025&#x27;</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;6000-6002:6000-6002/udp&#x27;</span><br> <span class="hljs-attr">volumes:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;./conf:/conf&#x27;</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;/etc/localtime:/etc/localtime:ro&#x27;</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">yisier1/nps</span><br> <br><span class="hljs-attr">networks:</span><br> <span class="hljs-attr">default:</span><br> <span class="hljs-attr">external:</span> <span class="hljs-literal">true</span><br> <span class="hljs-attr">name:</span> <span class="hljs-string">ngpm</span><br></code></pre></td></tr></table></figure><figure class="highlight plaintext"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br><span 
class="line">20</span><br><span class="line">21</span><br><span class="line">22</span><br><span class="line">23</span><br><span class="line">24</span><br><span class="line">25</span><br><span class="line">26</span><br><span class="line">27</span><br><span class="line">28</span><br><span class="line">29</span><br><span class="line">30</span><br><span class="line">31</span><br><span class="line">32</span><br><span class="line">33</span><br><span class="line">34</span><br><span class="line">35</span><br><span class="line">36</span><br><span class="line">37</span><br><span class="line">38</span><br><span class="line">39</span><br><span class="line">40</span><br><span class="line">41</span><br><span class="line">42</span><br><span class="line">43</span><br><span class="line">44</span><br><span class="line">45</span><br><span class="line">46</span><br><span class="line">47</span><br><span class="line">48</span><br><span class="line">49</span><br><span class="line">50</span><br><span class="line">51</span><br><span class="line">52</span><br><span class="line">53</span><br><span class="line">54</span><br><span class="line">55</span><br><span class="line">56</span><br><span class="line">57</span><br><span class="line">58</span><br><span class="line">59</span><br><span class="line">60</span><br><span class="line">61</span><br><span class="line">62</span><br><span class="line">63</span><br><span class="line">64</span><br><span class="line">65</span><br><span class="line">66</span><br><span class="line">67</span><br><span class="line">68</span><br><span class="line">69</span><br><span class="line">70</span><br><span class="line">71</span><br><span class="line">72</span><br><span class="line">73</span><br><span class="line">74</span><br><span class="line">75</span><br><span class="line">76</span><br><span class="line">77</span><br><span class="line">78</span><br><span class="line">79</span><br><span class="line">80</span><br><span class="line">81</span><br><span class="line">82</span><br></pre></td><td class="code"><pre><code class="hljs conf">appname = nps<br>#Boot mode(dev|pro)<br>runmode = pro<br> <br>#HTTP(S) proxy port, no startup if empty<br>http_proxy_ip=0.0.0.0<br>http_proxy_port=18081<br> <br>##bridge<br>bridge_type=tcp<br>bridge_port=8024<br>bridge_ip=0.0.0.0<br>tls_bridge_port=8025<br>tls_enable=true<br><br>#Traffic data persistence interval(minute)<br>#Ignorance means no persistence<br>#flow_store_interval=1<br> <br># log level LevelEmergency-&gt;0 LevelAlert-&gt;1 LevelCritical-&gt;2 LevelError-&gt;3 LevelWarning-&gt;4 LevelNotice-&gt;5 LevelInformational-&gt;6 LevelDebug-&gt;7<br>log_level=7<br>#log_path=nps.log<br> <br>#Whether to restrict IP access, true or false or ignore<br>#ip_limit=true<br> <br>#allow_ports=9001-9009,10001,11000-12000<br> <br>#Web management multi-user login<br>allow_user_login=false<br>allow_user_register=false<br>allow_user_change_username=false<br> <br>#extension<br>allow_flow_limit=false<br>allow_rate_limit=false<br>allow_tunnel_num_limit=false<br>allow_local_proxy=false<br>allow_connection_num_limit=false<br>allow_multi_ip=false<br>system_info_display=true<br> <br>#cache<br>http_cache=false<br>http_cache_length=100<br> <br>#get origin ip<br>http_add_origin_header=true<br> <br>#pprof debug options<br>#pprof_ip=0.0.0.0<br>#pprof_port=9999<br> <br>#client disconnect timeout<br>disconnect_timeout=60<br> <br># 以下的需要进行配置<br># Public password, which clients can use to connect to the server<br># After the connection, the server will be able to open relevant ports and 
parse related domain names according to its own configuration file.<br>public_vkey=&lt;16个字符&gt;<br> <br>#Web API unauthenticated IP address(the len of auth_crypt_key must be 16)<br>#Remove comments if needed<br>auth_key=&lt;24个字符&gt;<br>auth_crypt_key=&lt;16个字符&gt;<br> <br>#web<br>web_host=limour.top<br>web_username=Limour<br>web_password=&lt;16个字符&gt;<br>web_port = 8080<br>web_ip=0.0.0.0<br>web_open_ssl=false<br>web_base_url=<br>open_captcha=true<br># if web under proxy use sub path. like http://host/nps need this.<br>#web_base_url=/nps<br> <br>#p2p<br>p2p_ip=&lt;写服务器的ip&gt;<br>p2p_port=6000<br># 设置为6000,请在控制台防火墙开放6000~6002(额外添加2个端口)udp端口<br></code></pre></td></tr></table></figure><h2 id="配置端口映射">配置端口映射</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs bash">nano Port-Hopping.sh &amp;&amp; <span class="hljs-built_in">chmod</span> +x Port-Hopping.sh<br>nano /etc/systemd/system/Port-Hopping.service<br>systemctl <span class="hljs-built_in">enable</span> Port-Hopping &amp;&amp; systemctl start Port-Hopping &amp;&amp; systemctl status Port-Hopping &amp;&amp; iptables -t nat -L<br></code></pre></td></tr></table></figure><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-meta">#!/bin/bash</span><br><span class="hljs-comment"># IPv4</span><br>/usr/sbin/iptables -t nat -A PREROUTING -i eth0 -p udp --dport 32768:61000 -j DNAT --to-destination :3234<br><span class="hljs-comment"># IPv6</span><br>/usr/sbin/ip6tables -t nat -A PREROUTING -i eth0 -p udp --dport 32768:61000 -j DNAT --to-destination :3234<br></code></pre></td></tr></table></figure><figure class="highlight plaintext"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br></pre></td><td class="code"><pre><code class="hljs service">[Unit]<br>Description=Port-Hopping<br>After=network.target docker.service<br>[Service]<br>ExecStart=/root/Port-Hopping.sh<br>Restart=on-failure<br>[Install]<br>WantedBy=multi-user.target<br></code></pre></td></tr></table></figure><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br></pre></td><td class="code"><pre><code class="hljs bash">iptables -t nat -A DOCKER -p udp --dport 32768:61000 -j DNAT --to-destination `iptables -t nat -L| grep <span class="hljs-string">&quot;udp dpt:3234&quot;</span> | grep -oP <span class="hljs-string">&#x27;to:\K[^ ]+&#x27;</span>` <span class="hljs-comment"># 添加</span><br>iptables -t nat -D DOCKER -p udp --dport 32768:61000 -j DNAT --to-destination `iptables -t nat -L| grep <span class="hljs-string">&quot;udp dpts:32768:61000&quot;</span>| <span class="hljs-built_in">tail</span> -n 1 | grep -oP <span class="hljs-string">&#x27;to:\K[^ ]+&#x27;</span>` <span class="hljs-comment"># 删除</span><br></code></pre></td></tr></table></figure><h2 id="配置quic">配置quic</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span 
class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br></pre></td><td class="code"><pre><code class="hljs bash">sudo docker network create sswitch<br><span class="hljs-built_in">mkdir</span> -p ~/app/quic &amp;&amp; <span class="hljs-built_in">cd</span> ~/app/quic &amp;&amp; nano docker-compose.yml<br>nano hysteria.yaml<br>sudo docker-compose up -d &amp;&amp; sudo docker-compose logs<br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">version:</span> <span class="hljs-string">&#x27;3.9&#x27;</span><br><span class="hljs-attr">services:</span><br> <span class="hljs-attr">hysteria:</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">tobyxdd/hysteria</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">always</span><br> <span class="hljs-attr">extra_hosts:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;host.docker.internal:host-gateway&#x27;</span><br> <span class="hljs-attr">ports:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">&#x27;3234:3234/udp&#x27;</span><br> <span class="hljs-attr">volumes:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">./hysteria.yaml:/etc/hysteria.yaml</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">/root/base/NGPM/letsencrypt:/home/ubuntu/letsencrypt</span><br> <span class="hljs-attr">command:</span> [<span class="hljs-string">&quot;server&quot;</span>, <span class="hljs-string">&quot;-c&quot;</span>, <span class="hljs-string">&quot;/etc/hysteria.yaml&quot;</span>]<br> <br><span class="hljs-attr">networks:</span><br> <span class="hljs-attr">default:</span><br> <span class="hljs-attr">external:</span> <span class="hljs-literal">true</span><br> <span class="hljs-attr">name:</span> <span class="hljs-string">sswitch</span><br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br><span class="line">18</span><br><span class="line">19</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">listen:</span> <span class="hljs-string">:3234</span> <br> <br><span class="hljs-attr">tls:</span><br> <span class="hljs-attr">cert:</span> <span 
class="hljs-string">/home/ubuntu/letsencrypt/live/npm-1/fullchain.pem</span><br> <span class="hljs-attr">key:</span> <span class="hljs-string">/home/ubuntu/letsencrypt/live/npm-1/privkey.pem</span><br> <br><span class="hljs-attr">auth:</span><br> <span class="hljs-attr">type:</span> <span class="hljs-string">password</span><br> <span class="hljs-attr">password:</span> <span class="hljs-string">Se7RAuFZ8Lzg</span> <br> <br><span class="hljs-attr">bandwidth:</span><br> <span class="hljs-attr">up:</span> <span class="hljs-number">3</span> <span class="hljs-string">mbps</span><br> <span class="hljs-attr">down:</span> <span class="hljs-number">3</span> <span class="hljs-string">mbps</span><br> <br><span class="hljs-attr">masquerade:</span> <br> <span class="hljs-attr">type:</span> <span class="hljs-string">proxy</span><br> <span class="hljs-attr">proxy:</span><br> <span class="hljs-attr">url:</span> <span class="hljs-string">https://hexo.limour.top/</span> <br> <span class="hljs-attr">rewriteHost:</span> <span class="hljs-literal">true</span><br></code></pre></td></tr></table></figure><h2 id="测试转发">测试转发</h2><ul><li>在客户端新建<code>config.yaml</code>, 写入以下内容</li></ul><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">server:</span> <span class="hljs-string">hexo.limour.top:32768-61000</span><br> <br><span class="hljs-attr">auth:</span> <span class="hljs-string">Se7RAuFZ8Lzg</span> <br> <br><span class="hljs-attr">bandwidth:</span> <br> <span class="hljs-attr">up:</span> <span class="hljs-number">3</span> <span class="hljs-string">mbps</span><br> <span class="hljs-attr">down:</span> <span class="hljs-number">3</span> <span class="hljs-string">mbps</span><br> <br><span class="hljs-comment">#socks5:</span><br><span class="hljs-comment"># listen: 127.0.0.1:1580 </span><br> <br><span class="hljs-comment">#http:</span><br><span class="hljs-comment"># listen: 127.0.0.1:8580 </span><br> <br><span class="hljs-attr">tcpForwarding:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-attr">listen:</span> <span class="hljs-number">127.0</span><span class="hljs-number">.0</span><span class="hljs-number">.1</span><span class="hljs-string">:8025</span> <br> <span class="hljs-attr">remote:</span> <span class="hljs-string">host.docker.internal:8025</span> <br></code></pre></td></tr></table></figure><h2 id="测试穿透">测试穿透</h2><figure class="highlight powershell"><table><tr><td class="gutter"><pre><span class="line">1</span><br></pre></td><td class="code"><pre><code class="hljs powershell">.\npc.exe <span class="hljs-literal">--server</span>=<span class="hljs-number">127.0</span>.<span class="hljs-number">0.1</span>:<span class="hljs-number">8024</span> <span class="hljs-literal">-vkey</span>=&lt;vkey&gt; <span class="hljs-literal">-type</span>=tcp<br></code></pre></td></tr></table></figure><h2 id="客户端示例">客户端示例</h2><figure class="highlight bash"><table><tr><td class="gutter"><pre><span 
class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br></pre></td><td class="code"><pre><code class="hljs bash"><span class="hljs-built_in">mkdir</span> -p ~/app/quic-npc &amp;&amp; <span class="hljs-built_in">cd</span> ~/app/quic-npc &amp;&amp; nano docker-compose.yml<br>nano config.yaml<br>sudo docker-compose up -d &amp;&amp; sudo docker-compose logs<br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br><span class="line">12</span><br><span class="line">13</span><br><span class="line">14</span><br><span class="line">15</span><br><span class="line">16</span><br><span class="line">17</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">version:</span> <span class="hljs-string">&#x27;3.3&#x27;</span><br><span class="hljs-attr">services:</span><br> <span class="hljs-attr">quic_nps:</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">tobyxdd/hysteria</span><br> <span class="hljs-attr">network_mode:</span> <span class="hljs-string">host</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">always</span><br> <span class="hljs-attr">volumes:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">./config.yaml:/etc/config.yaml</span><br> <span class="hljs-attr">command:</span> [<span class="hljs-string">&quot;--config&quot;</span>, <span class="hljs-string">&quot;/etc/config.yaml&quot;</span>]<br> <br> <span class="hljs-attr">npc_lk:</span><br> <span class="hljs-attr">depends_on:</span><br> <span class="hljs-bullet">-</span> <span class="hljs-string">quic_nps</span><br> <span class="hljs-attr">network_mode:</span> <span class="hljs-string">host</span><br> <span class="hljs-attr">image:</span> <span class="hljs-string">yisier1/npc</span><br> <span class="hljs-attr">restart:</span> <span class="hljs-string">unless-stopped</span><br> <span class="hljs-attr">command:</span> <span class="hljs-string">-server=127.0.0.1:8025</span> <span class="hljs-string">-vkey=&lt;vkey&gt;</span> <span class="hljs-string">-tls_enable=true</span><br></code></pre></td></tr></table></figure><figure class="highlight yml"><table><tr><td class="gutter"><pre><span class="line">1</span><br><span class="line">2</span><br><span class="line">3</span><br><span class="line">4</span><br><span class="line">5</span><br><span class="line">6</span><br><span class="line">7</span><br><span class="line">8</span><br><span class="line">9</span><br><span class="line">10</span><br><span class="line">11</span><br></pre></td><td class="code"><pre><code class="hljs yml"><span class="hljs-attr">server:</span> <span class="hljs-string">hexo.limour.top:32768-61000</span><br> <br><span class="hljs-attr">auth:</span> <span class="hljs-string">Se7RAuFZ8Lzg</span> <br> <br><span class="hljs-attr">bandwidth:</span> <br> <span class="hljs-attr">up:</span> <span class="hljs-number">3</span> <span class="hljs-string">mbps</span><br> <span class="hljs-attr">down:</span> <span class="hljs-number">3</span> <span class="hljs-string">mbps</span><br> <br><span class="hljs-attr">tcpForwarding:</span><br> <span class="hljs-bullet">-</span> 
<span class="hljs-attr">listen:</span> <span class="hljs-number">127.0</span><span class="hljs-number">.0</span><span class="hljs-number">.1</span><span class="hljs-string">:8025</span> <br> <span class="hljs-attr">remote:</span> <span class="hljs-string">host.docker.internal:8025</span> <br></code></pre></td></tr></table></figure>]]></content:encoded>
<category domain="https://hexo.limour.top/tags/%E6%8E%A2%E7%B4%A2/">探索</category>
<category domain="https://hexo.limour.top/tags/docker/">docker</category>
<category domain="https://hexo.limour.top/tags/ngpm/">ngpm</category>
<category domain="https://hexo.limour.top/tags/%E5%86%85%E7%BD%91%E7%A9%BF%E9%80%8F/">内网穿透</category>
<comments>https://hexo.limour.top/Protocol-for-intranet-penetration-based-on-QUIC#disqus_thread</comments>
</item>
</channel>
</rss>