nvidia chat with rtx 安装教程

蹭个热度,写个教程

第一步,打开官网,下载安装包

使用 Chat with RTX 构建定制 LLM | NVIDIA

安装包有35GB,特别大,但是我用NAS配合千兆网下载的,速度非常快,10分钟左右。

反而拷贝到本地用了半小时,解压又花了10分钟。

第二步,运行安装包。无脑下一步。

第三步,等待进度条,这里有一些下载的东西,需要梯子

没有梯子怎么办,

下面是骚操作:把安装目录下的 \RAG\RAG.nvi 文件改成如下内容。主要是给各个下载/解压阶段加上了安装提示文字(installText),这样就能看出安装卡在哪一步。

<?xml version="1.0" encoding="utf-8"?>
<!--
  NVIDIA installer package descriptor (.nvi) for the "Chat with RTX" RAG component.
  NOTE(review): per the surrounding tutorial, the installText attributes on the
  download/extract phases were added so the installer UI shows which step is
  currently running, making it obvious where the install hangs without a proxy.
-->
<nvi
  name      = "RAG"
  title     = "${{title}}"
  version   = "${{version}}"
  versionText = "${{version}}"
  timestamp = "2009-09-01T10:30:00.000"
  disposition = "critical"
  allowsCustomPath ="true"
>
  <!-- Build-time filters: when set, MiniConda/MSMPI and the RAG zip are shipped
       inside the package instead of being downloaded at install time. -->
  <filter name="OfflineMiniCondaMSMPI"/>
  <filter name="OfflineRAGZip"/>
  <include name=".\strings.dat" select="/nvi/*"/>

  <!-- Display strings and minimum hardware/driver requirements. -->
  <strings>
    <string name="title" value="Chat With RTX"/>
    <string name="version" value="0.2"/>
    <string name="arpDisplayName" value="NVIDIA Chat With RTX"/>
    <string name="MinSupportedVRAMSize" value="7"/>
    <string name="MinSupportedDriverVersion" value="535.11"/>
    <string name="DemoRunCmd" value="app_launch.bat"/>
  </strings>

  <!-- Properties consumed by the installer engine and the RagLLMExt extension. -->
  <properties>
    <bool name="FixedInstallLocation" value="true"/>
    <string name="InstallLocation" value="${{InstallationFolder}}"/>
    <string name="MSMPI_InstallerPath" value="${{MSMPI_InstallerPath}}"/>
    <string name="MinSupportedVRAMSize" value="${{MinSupportedVRAMSize}}" />
    <string name="MinSupportedDriverVersion" value="${{MinSupportedDriverVersion}}"/>
    <bool name="LaunchApp" value="true"/>
    <string name="ArpDisplayName" value="${{arpDisplayName}}"/>
  </properties>

  <!-- Pre-install checks: GPU family, VRAM size, and driver version gates. -->
  <constraints>
    <custom name="GpuSupportCheck" level="error" text="${{IncompatibleGpu}}"/>
    <custom name="VRAMSizeSupportCheck" level="error" text="${{IncompatibleVRAMSize}}"/>
    <custom name="CheckDriverVersion" level="error" text="${{IncompatibleNVDriverVersion}}"/>
  </constraints>

  <!-- Native DLL implementing the custom actions (DownloadFromURL, etc.). -->
  <extensions>
    <extension file="RagLLMExt.DLL" />
  </extensions>

  <phases>
    <!-- Create target/cache dirs and copy license files into the install root. -->
    <standard phase="createTargetDirectory" when="install">
      <createDirectory target="${{InstallLocation}}"/>
      <createDirectory target="${{RagInstallerCache}}"/>
      <copyFile target="icon.ico"/>
      <copyFile target="Cuda_EULA.txt"/>
      <copyFile target="CudaPython_LICENSE.txt"/>
      <copyFile target="TensorRT-SLA.pdf"/>
      <copyFile target="cuddn_License.txt"/>
      <copyFile target="MicrosoftMPI-Redistributable-EULA.txt"/>
    </standard>

    <!-- Download the TensorRT-LLM source zip; this phase always runs online. -->
    <custom phase="downloadTrtLLM" action="DownloadFromURL" when="install" installText="downloading TrtLLM_URL...">
      <properties>
        <string name="DownloadURL" value="${{TrtLLM_URL}}"/>
        <string name="DownloadFilePath" value="${{TrtLLM_ZIPPath}}"/>
      </properties>
    </custom>

<!-- Only download the RAG app zip when it was not bundled offline. -->
<if filter="!OfflineRAGZip">
    <custom phase="downloadRAG" action="DownloadFromURL" when="install">
      <properties>
        <string name="DownloadURL" value="${{RAG_URL}}"/>
        <string name="DownloadFilePath" value="${{RAG_ZIPPath}}"/>
      </properties>
    </custom>
</if>

    <!-- Skip the MiniConda download/install when it is already present. -->
    <setPropertyIf phase="checkIfMiniCondaNotExists" boolProperty="MiniCondaNotExists" value="true" when="install">
      <directory name="${{MiniCondaPath}}" check="notExists" />
    </setPropertyIf>

<if filter="!OfflineMiniCondaMSMPI">
    <custom phase="downloadMiniConda" condition="MiniCondaNotExists" action="DownloadFromURL" when="install" installText="downloading MiniConda_URL ...">
      <properties>
        <string name="DownloadURL" value="${{MiniConda_URL}}"/>
        <string name="DownloadFilePath" value="${{MiniConda_InstallerPath}}"/>
      </properties>
    </custom>
</if>

    <!-- Skip the MSMPI download/install when it is already installed system-wide. -->
    <setPropertyIf phase="checkIfMSMPINotExists" boolProperty="MSMPINotExists" value="true" when="install">
      <directory name="C:\Program Files\Microsoft MPI" check="notExists" />
    </setPropertyIf>

<if filter="!OfflineMiniCondaMSMPI">
    <custom phase="downloadMSMPI" condition="MSMPINotExists" action="DownloadFromURL" when="install" installText="downloading MSMPI_URL...">
      <properties>
        <string name="DownloadURL" value="${{MSMPI_URL}}"/>
        <string name="DownloadFilePath" value="${{MSMPI_InstallerPath}}"/>
      </properties>
    </custom>
</if>

    <!-- Extract the downloaded zips via PowerShell Expand-Archive. -->
    <exe phase="extractTensorRT-LLM" when="install" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" onError="ignore" progress="50" autotick="20" installText="extracting...">
      <arg value="-Command" quote="none"/>
      <arg value ="Expand-Archive -LiteralPath '${{RagInstallerCache}}\${{TrtLLM_ZIP}}' -DestinationPath '${{TrtLLMExtractionPath}}' -Force" quote="none"/>
      <success value="0"/>
    </exe>

<!-- The RAG zip source path differs by offline/online filter; extraction is otherwise identical. -->
<if filter="OfflineRAGZip">
    <exe phase="extractRAG" when="install" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" onError="ignore" progress="50" autotick="20" installText="extracting...">
      <arg value="-Command" quote="none"/>
      <arg value ="Expand-Archive -LiteralPath '${{SubPackageDirectory}}\${{RAG_ZIP}}' -DestinationPath '${{RagExtractionPath}}' -Force" quote="none"/>
      <success value="0"/>
    </exe>
</if>

<if filter="!OfflineRAGZip">
    <exe phase="extractRAG" when="install" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" onError="ignore" progress="50" autotick="20" installText="extracting...">
      <arg value="-Command" quote="none"/>
      <arg value ="Expand-Archive -LiteralPath '${{RagInstallerCache}}\${{RAG_ZIP}}' -DestinationPath '${{RagExtractionPath}}' -Force" quote="none"/>
      <success value="0"/>
    </exe>
</if>

    <custom phase="installMSMPI" condition="MSMPINotExists" action="InstallMSMPI" when="install" installText="installing MSMPI..."/>

    <!-- Silent MiniConda install (no PATH registration, no python registration). -->
    <exe phase="installMiniConda" condition="MiniCondaNotExists" when="install" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" progress="100" autotick="20" installText="installing miniconda...">
      <arg value ="start -wait -FilePath '${{MiniConda_InstallerPath}}' -ArgumentList '/AddToPath=0', '/RegisterPython=0', '/S', '/D=${{MiniCondaPath}}'" quote="none"/>
      <success value="0"/>
    </exe>

    <setPropertyIf phase="checkIfMiniCondaEnvNotExists" boolProperty="MiniCondaEnvNotExists" value="true" when="install" >
      <directory name="${{MinicondaEnvPath}}" check="notExists" />
    </setPropertyIf>

    <!-- Create the dedicated Python 3.10 conda environment for the app. -->
    <exe phase="createPythonEnv" condition="MiniCondaEnvNotExists" when="install" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" progress="50" autotick="20" installText="creating miniconda environment...">
      <arg value ="${{MiniCondaPath}}\Scripts\conda.exe create -p ${{MinicondaEnvPath}} python=3.10 -y" quote="none"/>
      <success value="0"/>
    </exe>

    <exe phase="setChannelPriority" condition="MiniCondaEnvNotExists" when="install" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" progress="50" autotick="20" installText="creating miniconda environment...">
      <arg value ="${{MiniCondaPath}}\Scripts\conda.exe config --set channel_priority strict" quote="none"/>
      <success value="0"/>
    </exe>

    <exe phase="installCudaToolKit" condition="MiniCondaEnvNotExists" when="install" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" progress="100" autotick="20" installText="installing cuda-toolkit...">
      <arg value ="${{MiniCondaPath}}\Scripts\conda.exe install -p ${{MinicondaEnvPath}} -c nvidia/label/cuda-12.2.1 cuda-toolkit -y" quote="none"/>
      <success value="0"/>
    </exe>

    <!-- Append the env's torch\lib to the system PATH for DLL resolution. -->
    <environment phase="setEnvironmentVariable">
      <insertVariable type="system" variableName="PATH" value="${{MinicondaEnvPath}}\Lib\site-packages\torch\lib" delimiter=";" position="append"/>
    </environment>

    <!-- pip install phases; the actual commands (and mirror URLs) live in strings.dat. -->
    <custom phase="execRag_PIPCmd" action="ExecuteCommand" when="install" installText="downloading Rag_PIPCmd dependencies...">
      <properties>
        <string name="ExecCommand" value="${{Rag_PIPCmd}}"/>
      </properties>
    </custom>

    <custom phase="execTorch_PIPCmd" action="ExecuteCommand" when="install" installText="downloading Torch_PIPCmd dependencies...">
      <properties>
        <string name="ExecCommand" value="${{Torch_PIPCmd}}"/>
      </properties>
    </custom>

    <custom phase="execCudnn_PIPCmd" action="ExecuteCommand" when="install" installText="downloading Cudnn_PIPCmd dependencies...">
      <properties>
        <string name="ExecCommand" value="${{Cudnn_PIPCmd}}"/>
      </properties>
    </custom>

    <custom phase="execTensor_PIPCmd" action="ExecuteCommand" when="install" installText="downloading Tensor_PIPCmd dependencies...">
      <properties>
        <string name="ExecCommand" value="${{Tensor_PIPCmd}}"/>
      </properties>
    </custom>

    <custom phase="execCudnnUninstall_PIPCmd" action="ExecuteCommand" when="install" installText="downloading CudnnUninstall_PIPCmd dependencies...">
      <properties>
        <string name="ExecCommand" value="${{CudnnUninstall_PIPCmd}}"/>
      </properties>
    </custom>

    <custom phase="execTrtLLM_PIPCmd" action="ExecuteCommand" when="install" installText="downloading TrtLLM_PIPCmd dependencies...">
      <properties>
        <string name="ExecCommand" value="${{TrtLLM_PIPCmd}}"/>
      </properties>
    </custom>

<if filter="!OfflineMiniCondaMSMPI">
    <standard phase="deleteDownloadedFiles" when="install" onError="ignore">
      <deleteFile target="${{MiniConda_InstallerPath}}" />
      <deleteFile target="${{MSMPI_InstallerPath}}" />
    </standard>
</if>

    <!-- NOTE(review): this phase reuses the name "deleteDownloadedFiles" from the
         block above; unclear whether the NVI engine requires unique phase names.
         Left as shipped. -->
    <standard phase="deleteDownloadedFiles" when="install" onError="ignore">
      <deleteFile target="${{TrtLLM_ZIPPath}}" />
      <deleteFile target="${{RAG_ZIPPath}}" />
      <deleteDirectory target="${{RagInstallerCache}}" />
    </standard>

    <!-- Run the application demo -->
    <exe phase="execDemo" condition="LaunchApp" when="installClose" dir="${{RagExtractionPath}}\${{RAG_FileName}}" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" onError="ignore" progress="50" autotick="20">
      <arg value ="start -FilePath ${{DemoRunCmd}}" quote="none"/>
      <success value="0"/>
    </exe>

    <!-- delete logging directory -->
    <standard phase="cleanupDirectories" when="uninstall" condition="Global:IsUninstallInitiated" onError="ignore" uninstallText="removing RAG directories...">
      <deleteDirectoryTree target="${{LocalUserAppData}}\NVIDIA\ChatWithRTX\InstallerLogs" />
      <deleteDirectory target="${{LocalUserAppData}}\NVIDIA\ChatWithRTX" />
    </standard>

    <!-- Remove copied license files and the extracted trees on uninstall. -->
    <standard phase="cleanupDirectories" when="uninstall" onError="ignore" uninstallText="removing RAG directories...">
      <deleteFile target="${{InstallLocation}}\Cuda_EULA.txt"/>
      <deleteFile target="${{InstallLocation}}\CudaPython_LICENSE.txt"/>
      <deleteFile target="${{InstallLocation}}\TensorRT-SLA.pdf"/>
      <deleteFile target="${{InstallLocation}}\cuddn_License.txt"/>
      <deleteFile target="${{InstallLocation}}\MicrosoftMPI-Redistributable-EULA.txt"/>
      <deleteFile target="${{InstallLocation}}\icon.ico"/>
      <deleteDirectoryTree target="${{TrtLLMExtractionPath}}" />
      <deleteDirectoryTree target="${{RagExtractionPath}}" />
      <deleteDirectory target="${{InstallLocation}}"/>
    </standard>

    <exe phase="uninstallMiniconda" when="uninstall" uninstallText="removing miniconda..." name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" onError="ignore" progress="50" autotick="20">
      <arg value ="start -wait -FilePath '${{MiniCondaPath}}\Uninstall-Miniconda3.exe' -ArgumentList '/S'" quote="none"/>
      <success value="0"/>
    </exe>

    <exe phase="deleteCondaEnv" when="uninstall" uninstallText="removing miniconda environment..." name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" onError="ignore" progress="50" autotick="20">
      <arg value ="${{MiniCondaPath}}\Scripts\conda.exe remove -p ${{MinicondaEnvPath}} --all --yes" quote="none"/>
      <success value="0"/>
    </exe>

    <!-- Terminate any python.exe still running from the env before cleanup. -->
    <exe phase="killPython" when="uninstall" name="${{SystemDirectory}}\WindowsPowerShell\v1.0\powershell.exe" onError="ignore" progress="50" autotick="20">
      <arg value ="Get-WmiObject Win32_Process | Where-Object { $_.Path -eq '${{MinicondaEnvPath}}\python.exe' } | ForEach-Object { $_.Terminate() }" quote="none"/>
      <success value="0"/>
    </exe>

    <standard phase="deleteDesktopShortcut" when="uninstall" onError="ignore">
      <deleteShortcutLink linkName="NVIDIA Chat with RTX" location="desktop" />
    </standard>

  </phases>

  <finishOptions>
    <option name="LaunchApp" text="Launch the &quot;Chat with RTX&quot; application" property="LaunchApp" condition="LaunchApp" />
  </finishOptions>

  <!-- Files shipped inside the sub-package; offline payloads are filter-gated. -->
  <manifest>
    <file name="RAG.nvi" />
    <file name="RagLLMExt.dll" />
    <file name="Cuda_EULA.txt"/>
    <file name="CudaPython_LICENSE.txt"/>
    <file name="TensorRT-SLA.pdf"/>
    <file name="cuddn_License.txt"/>
    <file name="MicrosoftMPI-Redistributable-EULA.txt"/>
    <file name="icon.ico"/>
<if filter="OfflineMiniCondaMSMPI">
    <file name="Miniconda3-latest-Windows-x86_64.exe"/>
    <file name="msmpisetup.exe"/>
</if>
<if filter="OfflineRAGZip">
    <file name="trt-llm-rag-windows-main.zip"/>
</if>
  </manifest>

</nvi>

再改 \RAG\strings.dat(注意文件名是 strings.dat,不是 string.dat),把最后几行 pip 命令里的软件源改成阿里云的 PyPI 镜像,用清华源也可以。

<?xml version="1.0" encoding="utf-8"?>
<!--
  strings.dat: path, URL, and command definitions included by RAG.nvi.
  NOTE(review): per the surrounding tutorial, the pip commands near the bottom
  were edited to add the Aliyun PyPI mirror as an extra index so installation
  works without a proxy inside mainland China.
-->
<nvi>
  <strings>
    <!-- Install/cache locations under the user's LocalAppData. -->
    <string name="InstallationFolder" value="${{LocalUserAppData}}\NVIDIA\ChatWithRTX"/>
    <string name="RagInstallerCache" value="${{InstallLocation}}\Cache"/>
    <string name="MiniCondaPath" value="${{LocalUserAppData}}\NVIDIA\MiniConda"/>
    <string name="MinicondaEnv" value="env_nvd_rag"/>
    <string name="MinicondaEnvPath" value="${{InstallLocation}}\${{MinicondaEnv}}"/>

    <string name="MiniConda_URL" value="https://repo.anaconda.com/miniconda/${{MiniConda_Installer}}" />
    <string name="MiniConda_Installer" value="Miniconda3-latest-Windows-x86_64.exe" />

<!-- Installer path depends on whether MiniConda was bundled offline. -->
<if filter="!OfflineMiniCondaMSMPI">
    <string name="MiniConda_InstallerPath" value="${{RagInstallerCache}}\Miniconda3-latest-Windows-x86_64.exe" />
</if>

<if filter="OfflineMiniCondaMSMPI">
    <string name="MiniConda_InstallerPath" value="${{SubPackageDirectory}}\Miniconda3-latest-Windows-x86_64.exe" />
</if>

    <!-- Source archives fetched from GitHub at install time. -->
    <string name="TrtLLM_URL" value="https://github.com/NVIDIA/TensorRT-LLM/archive/refs/tags/v0.7.0.zip" />
    <string name="TrtLLM_FileName" value="TensorRT-LLM-0.7.0"/>
    <string name="TrtLLM_ZIP" value="${{TrtLLM_FileName}}.zip" />
    <string name="TrtLLM_ZIPPath" value="${{RagInstallerCache}}\${{TrtLLM_ZIP}}"/>

    <string name="RAG_URL" value="https://github.com/NVIDIA/trt-llm-rag-windows/archive/refs/heads/release/1.0.zip" />
    <string name="RAG_FileName" value="trt-llm-rag-windows-main" />
    <string name="RAG_ZIP" value="${{RAG_FileName}}.zip" />
    <string name="RAG_ZIPPath" value="${{RagInstallerCache}}\${{RAG_ZIP}}"/>

    <string name="MSMPI_URL" value="https://github.com/microsoft/Microsoft-MPI/releases/download/v10.1.1/msmpisetup.exe" />
    <string name="MSMPI_Installer" value="msmpisetup.exe" />

<if filter="!OfflineMiniCondaMSMPI">
    <string name="MSMPI_InstallerPath" value="${{RagInstallerCache}}\${{MSMPI_Installer}}"/>
</if>

<if filter="OfflineMiniCondaMSMPI">
    <string name="MSMPI_InstallerPath" value="${{SubPackageDirectory}}\${{MSMPI_Installer}}"/>
</if>

    <string name="TrtLLMExtractionPath" value="${{InstallLocation}}\TensorRT-LLM"/>
    <string name="RagExtractionPath" value="${{InstallLocation}}\RAG"/>

    <string name="RagModelDirectory" value="${{RagExtractionPath}}\${{RAG_FileName}}\model"/>

    <!-- Each pip command first activates the conda env, then runs pip in it. -->
    <string name="MiniCondaActivateCmd" value="${{MiniCondaPath}}\Scripts\activate.bat"/>
    <string name="MiniCondaEnvActivate" value="${{MiniCondaActivateCmd}} ${{MinicondaEnvPath}}"/>

    <!-- Aliyun mirror added as extra index; NVIDIA/PyTorch indexes kept for
         packages the mirror does not carry (tensorrt_llm, cu121 wheels). -->
    <string name="Rag_PIPCmd" value="${{MiniCondaEnvActivate}} &amp;&amp; pip install -r ${{RagExtractionPath}}\${{RAG_FileName}}\requirements.txt --extra-index-url https://mirrors.aliyun.com/pypi/simple"/>
    <string name="TrtLLM_PIPCmd" value="${{MiniCondaEnvActivate}} &amp;&amp; pip install tensorrt_llm==0.7.0 --extra-index-url https://mirrors.aliyun.com/pypi/simple  --extra-index-url https://pypi.nvidia.com --extra-index-url https://download.pytorch.org/whl/cu121"/>
    <string name="Torch_PIPCmd" value="${{MiniCondaEnvActivate}} &amp;&amp; pip install torch==2.1.0+cu121 --extra-index-url https://mirrors.aliyun.com/pypi/simple" />
    <string name="Cudnn_PIPCmd" value="${{MiniCondaEnvActivate}} &amp;&amp; pip install nvidia-cudnn-cu11==8.9.4.25 --no-cache-dir  --extra-index-url https://mirrors.aliyun.com/pypi/simple" />
    <string name="Tensor_PIPCmd" value="${{MiniCondaEnvActivate}} &amp;&amp; pip install --pre --extra-index-url https://mirrors.aliyun.com/pypi/simple tensorrt==9.2.0.post12.dev5 --no-cache-dir" />
    <string name="CudnnUninstall_PIPCmd" value="${{MiniCondaEnvActivate}} &amp;&amp; pip uninstall -y nvidia-cudnn-cu11" />

    <!-- Error messages shown by the constraints in RAG.nvi. -->
    <string name="IncompatibleNVDriverVersion" value="NVIDIA Graphics Driver ${{MinSupportedDriverVersion}} or above is needed to run &quot;Chat with RTX&quot;."/>
    <string name="IncompatibleVRAMSize" value="&quot;Chat with RTX&quot; requires minimum ${{MinSupportedVRAMSize}} GB of GPU memory to function."/>
    <string name="IncompatibleGpu" value="&quot;Chat with RTX&quot; is supported on Ampere and above GPU family."/>
  </strings>
</nvi>

然后,

重新运行一次,选择清除安装

等待即可,后面还有卡住的地方可以根据提示看看具体哪个依赖装不上,手动安装看看。

还没完,

启动时需要访问 huggingface.co,还是需要梯子。这一步我还没有找到绕过的办法,是靠梯子通过的。

如果没有梯子,报错如下:

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\RAG\trt-llm-rag-windows-main\app.py", line 114, in <module>
    embed_model = HuggingFaceEmbeddings(model_name=embedded_model)
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\langchain\embeddings\huggingface.py", line 66, in __init__
    self.client = sentence_transformers.SentenceTransformer(
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\sentence_transformers\SentenceTransformer.py", line 87, in __init__
    snapshot_download(model_name_or_path,
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\sentence_transformers\util.py", line 442, in snapshot_download
    model_info = _api.model_info(repo_id=repo_id, revision=revision, token=token)
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\huggingface_hub\utils\_validators.py", line 118, in _inner_fn
    return fn(*args, **kwargs)
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\huggingface_hub\hf_api.py", line 2084, in model_info
    r = get_session().get(path, headers=headers, timeout=timeout, params=params)
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\requests\sessions.py", line 602, in get
    return self.request("GET", url, **kwargs)
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\requests\sessions.py", line 589, in request
    resp = self.send(prep, **send_kwargs)
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\requests\sessions.py", line 703, in send
    r = adapter.send(request, **kwargs)
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\huggingface_hub\utils\_http.py", line 67, in send
    return super().send(request, *args, **kwargs)
  File "C:\Users\panda\AppData\Local\NVIDIA\ChatWithRTX\env_nvd_rag\lib\site-packages\requests\adapters.py", line 507, in send
    raise ConnectTimeout(e, request=request)
requests.exceptions.ConnectTimeout: (MaxRetryError("HTTPSConnectionPool(host='huggingface.co', port=443): Max retries exceeded with url: /api/models/WhereIsAI/UAE-Large-V1 (Caused by ConnectTimeoutError(<urllib3.connection.HTTPSConnection object at 0x000002C45EDC09D0>, 'Connection to huggingface.co timed out. (connect timeout=None)'))"), '(Request ID: a67d4752-a57c-467e-a8ed-95dae8dbbb96)')
请按任意键继续. . .

如果你用了梯子,能够打开对话窗口,但是马上会有这样一个错误提示

我还遇到过另一个错误,提示需要设置 share=true,但后来复现不出来了。

解决方法也简单,在打开了对话窗口之后马上关掉梯子

最后,Chat with RTX试试吧

后记,我的ai model少一个选项,推测是显卡性能不够,另一个模型不能使用。

  • 3
    点赞
  • 6
    收藏
    觉得还不错? 一键收藏
  • 2
    评论
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值