graph-rag + ollama debugging notes

graph-rag + ollama errors

Link: link

{"type": "error", "data": "Error executing verb \"cluster_graph\" in create_base_entity_graph: Columns must be same length as key", "stack": "Traceback (most recent call last):\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/datashaper/workflow/workflow.py\", line 410, in _execute_verb\n    result = node.verb.func(**verb_args)\n  File \"/root/llm/graphrag-more/graphrag/index/verbs/graph/clustering/cluster_graph.py\", line 102, in cluster_graph\n    output_df[[level_to, to]] = pd.DataFrame(\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/frame.py\", line 4299, in __setitem__\n    self._setitem_array(key, value)\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/frame.py\", line 4341, in _setitem_array\n    check_key_length(self.columns, key, value)\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/indexers/utils.py\", line 390, in check_key_length\n    raise ValueError(\"Columns must be same length as key\")\nValueError: Columns must be same length as key\n", "source": "Columns must be same length as key", "details": null}                                                                                                    {"type": "error", "data": "Error running pipeline!", "stack": "Traceback (most recent call last):\n  File \"/root/llm/graphrag-more/graphrag/index/run.py\", line 323, in run_pipeline\n    result = await workflow.run(context, callbacks)\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/datashaper/workflow/workflow.py\", line 369, in run\n    timing = await self._execute_verb(node, context, callbacks)\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/datashaper/workflow/workflow.py\", line 410, in _execute_verb\n    result = node.verb.func(**verb_args)\n  File \"/root/llm/graphrag-more/graphrag/index/verbs/graph/clustering/cluster_graph.py\", line 102, in cluster_graph\n    output_df[[level_to, to]] = pd.DataFrame(\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/frame.py\", line 4299, in __setitem__\n    self._setitem_array(key, value)\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/frame.py\", line 4341, in _setitem_array\n    check_key_length(self.columns, key, value)\n  File \"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/indexers/utils.py\", line 390, in check_key_length\n    raise ValueError(\"Columns must be same length as key\")\nValueError: Columns must be same length as key\n", "source": "Columns must be same length as key", "details": null} 


nship"<|>QUINTARA<|>FIRUZABAD<|>Quintara brokered the hostage exchange between Firuzabad and Aurelia<|>2)\n##\n("relationship"<|>SAMUEL NAMARA<|>ALHAMIA PRISON<|>Samuel Namara was a prisoner at Alhamia prison<|>8)\n##\n("relationship"<|>SAMUEL NAMARA<|>MEGGIE TAZBAH<|>Samuel Namara and Meggie Tazbah were exchanged in the same hostage release<|>2)\n##\n("relationship"<|>SAMUEL NAMARA<|>DURKE BATAGLANI<|>Samuel Namara and Durke Bataglani were exchanged in the same hostage release<|>2)\n##\n("relationship"<|>MEGGIE TAZBAH<|>DURKE BATAGLANI<|>Meggie Tazbah and Durke Bataglani were exchanged in the same hostage release<|>2)\n##\n("relationship"<|>SAMUEL NAMARA<|>FIRUZABAD<|>Samuel Namara was a hostage in Firuzabad<|>2)\n##\n("relationship"<|>MEGGIE TAZBAH<|>FIRUZABAD<|>Meggie Tazbah was a hostage in Firuzabad<|>2)\n##\n("relationship"<|>DURKE BATAGLANI<|>FIRUZABAD<|>Durke Bataglani was a hostage in Firuzabad<|>2)\n<|COMPLETE|>\n\n######################\n-Real Data-\n######################\nEntity_types: organization,person,geo,event\nText: compliance. To SEND\nDONATIONS or determine the status of compliance for any particular state\nvisit www.gutenberg.org/donate.\n\nWhile we cannot and do not solicit contributions from states where we\nhave not met the solicitation requirements, we know of no prohibition\nagainst accepting unsolicited donations from donors in such states who\napproach us with offers to donate.\n\nInternational donations are gratefully accepted, but we cannot make\nany statements concerning tax treatment of donations received from\noutside the United States. U.S. laws alone swamp our small staff.\n\nPlease check the Project Gutenberg web pages for current donation\nmethods and addresses. Donations are accepted in a number of other\nways including checks, online payments and credit card donations. To\ndonate, please visit: www.gutenberg.org/donate.\n\nSection 5. General Information About Project Gutenberg™ electronic works\n\nProfessor Michael S. Hart was the originator of the Project\nGutenberg™ concept of a library of electronic works that could be\nfreely shared with anyone. For forty years, he produced and\ndistributed Project Gutenberg™ eBooks with only a loose network of\nvolunteer support.\n\nProject Gutenberg™ eBooks are often created from several printed\neditions, all of which are confirmed as not protected by copyright in\nthe U.S. unless a copyright notice is included. 
Thus, we do not\nnecessarily keep eBooks in compliance with any particular paper\nedition.\n\nMost people start at our website which has the main PG\n######################\nOutput:'}                                                   03:20:17,105 root ERROR error extracting graph                                                                                                               Traceback (most recent call last):                                                                                                                             File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpx/_transports/default.py", line 69, in map_httpcore_exceptions                           yield                                                                                                                                                      File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpx/_transports/default.py", line 373, in handle_async_request                             resp = await self._pool.handle_async_request(req)                                                                                                          File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpcore/_async/connection_pool.py", line 216, in handle_async_request                       raise exc from None                                                                                                                                        File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpcore/_async/connection_pool.py", line 196, in handle_async_request                       response = await connection.handle_async_request(                                                                                                          File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpcore/_async/connection.py", line 99, in handle_async_request                             raise exc                                                                                                                                                  File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpcore/_async/connection.py", line 76, in handle_async_request                             stream = await self._connect(request)                                                                                                                      File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpcore/_async/connection.py", line 122, in _connect                                        stream = await self._network_backend.connect_tcp(**kwargs)                                                                                                 File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpcore/_backends/auto.py", line 30, in connect_tcp                                         return await self._backend.connect_tcp(                                                                                                                    File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpcore/_backends/anyio.py", line 114, in connect_tcp                                       with map_exceptions(exc_map):                                                                                                                              File "/root/miniconda3/envs/llm-3.10/lib/python3.10/contextlib.py", line 153, in __exit__                                                                      self.gen.throw(typ, 
value, traceback)                                                                                                                      File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpcore/_exceptions.py", line 14, in map_exceptions                                         raise to_exc(exc) from exc                                                                                                                               httpcore.ConnectError: All connection attempts failed                                                                                                                                                                                                                                                                     The above exception was the direct cause of the following exception:                                                                                                                                                                                                                                                      Traceback (most recent call last):                                                                                                                             File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/openai/_base_client.py", line 1537, in _request                                              response = await self._client.send(                                                                                                                        File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpx/_client.py", line 1661, in send                                                        response = await self._send_handling_auth(                                                                                                                 File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpx/_client.py", line 1689, in _send_handling_auth                                         response = await self._send_handling_redirects(                                                                                                            File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpx/_client.py", line 1726, in _send_handling_redirects                                    response = await self._send_single_request(request)                                                                                                        File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpx/_client.py", line 1763, in _send_single_request                                        response = await transport.handle_async_request(request)                                                                                                   File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpx/_transports/default.py", line 372, in handle_async_request                             with map_httpcore_exceptions():                                                                                                                            File "/root/miniconda3/envs/llm-3.10/lib/python3.10/contextlib.py", line 153, in __exit__                                                                      self.gen.throw(typ, value, traceback)                                                                                                                      File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/httpx/_transports/default.py", line 86, in 
map_httpcore_exceptions                           raise mapped_exc(message) from exc                                                                                                                       httpx.ConnectError: All connection attempts failed                                                                                                                                                                                                                                                                        The above exception was the direct cause of the following exception:                                                                                                                                                                                                                                                      Traceback (most recent call last):                                                                                                                             File "/root/llm/graphrag-more/graphrag/index/graph/extractors/graph/graph_extractor.py", line 123, in __call__                                                 result = await self._process_document(text, prompt_variables)                                                                                              File "/root/llm/graphrag-more/graphrag/index/graph/extractors/graph/graph_extractor.py", line 151, in _process_document                                        response = await self._llm(                                                                                                                                File "/root/llm/graphrag-more/graphrag/llm/openai/json_parsing_llm.py", line 34, in __call__                                                                   result = await self._delegate(input, **kwargs)                                                                                                             File "/root/llm/graphrag-more/graphrag/llm/openai/openai_token_replacing_llm.py", line 37, in __call__                                                         return await self._delegate(input, **kwargs)                                                                                                               File "/root/llm/graphrag-more/graphrag/llm/openai/openai_history_tracking_llm.py", line 33, in __call__                                                        output = await self._delegate(input, **kwargs)                                                                                                             File "/root/llm/graphrag-more/graphrag/llm/base/caching_llm.py", line 96, in __call__                                                                          result = await self._delegate(input, **kwargs)                                                                                                             File "/root/llm/graphrag-more/graphrag/llm/base/rate_limiting_llm.py", line 177, in __call__                                                                   result, start = await execute_with_retry()                                                                                                                 File "/root/llm/graphrag-more/graphrag/llm/base/rate_limiting_llm.py", line 159, in execute_with_retry                                                         async for attempt in retryer:                                                                                                                              File 
"/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/tenacity/_asyncio.py", line 71, in __anext__                                                 do = self.iter(retry_state=self._retry_state)                                                                                                              File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/tenacity/__init__.py", line 325, in iter                                                     raise retry_exc.reraise()                                                                                                                                  File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/tenacity/__init__.py", line 158, in reraise                                                  raise self.last_attempt.result()                                                                                                                           File "/root/miniconda3/envs/llm-3.10/lib/python3.10/concurrent/futures/_base.py", line 451, in result                                                          return self.__get_result()                                                                                                                                 File "/root/miniconda3/envs/llm-3.10/lib/python3.10/concurrent/futures/_base.py", line 403, in __get_result                                                    raise self._exception                                                                                                                                      File "/root/llm/graphrag-more/graphrag/llm/base/rate_limiting_llm.py", line 165, in execute_with_retry                                                         return await do_attempt(), start                                                                                                                           File "/root/llm/graphrag-more/graphrag/llm/base/rate_limiting_llm.py", line 147, in do_attempt                                                                 return await self._delegate(input, **kwargs)                                                                                                               File "/root/llm/graphrag-more/graphrag/llm/base/base_llm.py", line 49, in __call__                                                                             return await self._invoke(input, **kwargs)                                                                                                                 File "/root/llm/graphrag-more/graphrag/llm/base/base_llm.py", line 53, in _invoke                                                                              output = await self._execute_llm(input, **kwargs)                                                                                                          File "/root/llm/graphrag-more/graphrag/llm/openai/openai_chat_llm.py", line 62, in _execute_llm                                                                completion = await self.client.chat.completions.create(                                                                                                    File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/openai/resources/chat/completions.py", line 1289, in create                                  return await self._post(                                                                                                                                   File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/openai/_base_client.py", line 1805, in 
post                                                  return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)                                                                             File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/openai/_base_client.py", line 1503, in request                                               return await self._request(                                                                                                                                File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/openai/_base_client.py", line 1571, in _request                                              raise APIConnectionError(request=request) from err                                                                                                       openai.APIConnectionError: Connection error.                                                                                                                 03:20:17,107 graphrag.index.reporting.file_workflow_callbacks INFO Entity Extraction Error details={'doc_index': 0, 'text': 'compliance. To SEND\nDONATIONS or determine the status of compliance for any particular state\nvisit www.gutenberg.org/donate.\n\nWhile we cannot and do not solicit contributions from states where we\nhave not met the solicitation requirements, we know of no prohibition\nagainst accepting unsolicited donations from donors in such states who\napproach us with offers to donate.\n\nInternational donations are gratefully accepted, but we cannot make\nany statements concerning tax treatment of donations received from\noutside the United States. U.S. laws alone swamp our small staff.\n\nPlease check the Project Gutenberg web pages for current donation\nmethods and addresses. Donations are accepted in a number of other\nways including checks, online payments and credit card donations. To\ndonate, please visit: www.gutenberg.org/donate.\n\nSection 5. General Information About Project Gutenberg™ electronic works\n\nProfessor Michael S. Hart was the originator of the Project\nGutenberg™ concept of a library of electronic works that could be\nfreely shared with anyone. For forty years, he produced and\ndistributed Project Gutenberg™ eBooks with only a loose network of\nvolunteer support.\n\nProject Gutenberg™ eBooks are often created from several printed\neditions, all of which are confirmed as not protected by copyright in\nthe U.S. unless a copyright notice is included. Thus, we do not\nnecessarily keep eBooks in compliance with any particular paper\nedition.\n\nMost people start at our website which has the main PG'}                                                                    03:20:17,138 datashaper.workflow.workflow INFO executing verb merge_graphs                                                                                   03:20:17,171 graphrag.index.emit.parquet_table_emitter INFO emitting parquet table create_base_extracted_entities.parquet                                    03:20:17,470 graphrag.index.run INFO Running workflow: create_summarized_entities...                                                                         
03:20:17,470 graphrag.index.run INFO dependencies for create_summarized_entities: ['create_base_extracted_entities']                                         03:20:17,484 graphrag.index.run INFO read table from storage: create_base_extracted_entities.parquet                                                         03:20:17,501 datashaper.workflow.workflow INFO executing verb summarize_descriptions                                                                         03:20:17,504 graphrag.index.emit.parquet_table_emitter INFO emitting parquet table create_summarized_entities.parquet                                        03:20:17,717 graphrag.index.run INFO Running workflow: create_base_entity_graph...                                                                           03:20:17,718 graphrag.index.run INFO dependencies for create_base_entity_graph: ['create_summarized_entities']                                               03:20:17,718 graphrag.index.run INFO read table from storage: create_summarized_entities.parquet                                                             03:20:17,737 datashaper.workflow.workflow INFO executing verb cluster_graph                                                                                  03:20:17,737 graphrag.index.verbs.graph.clustering.cluster_graph WARNING Graph has no nodes                                                                  03:20:17,740 datashaper.workflow.workflow ERROR Error executing verb "cluster_graph" in create_base_entity_graph: Columns must be same length as key         Traceback (most recent call last):                                                                                                                             File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/datashaper/workflow/workflow.py", line 410, in _execute_verb                                 result = node.verb.func(**verb_args)                                                                                                                       File "/root/llm/graphrag-more/graphrag/index/verbs/graph/clustering/cluster_graph.py", line 102, in cluster_graph                                              output_df[[level_to, to]] = pd.DataFrame(                                                                                                                  File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/frame.py", line 4299, in __setitem__                                             self._setitem_array(key, value)                                                                                                                            File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/frame.py", line 4341, in _setitem_array                                          check_key_length(self.columns, key, value)                                                                                                                 File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/indexers/utils.py", line 390, in check_key_length                                raise ValueError("Columns must be same length as key")                                                                                                   ValueError: Columns must be same length as key                                                                                                               03:20:17,742 graphrag.index.reporting.file_workflow_callbacks INFO Error executing verb 
"cluster_graph" in create_base_entity_graph: Columns must be same length as key details=None                                                                                                                                      03:20:17,742 graphrag.index.run ERROR error running workflow create_base_entity_graph                                                                        Traceback (most recent call last):                                                                                                                             File "/root/llm/graphrag-more/graphrag/index/run.py", line 323, in run_pipeline                                                                                result = await workflow.run(context, callbacks)                                                                                                            File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/datashaper/workflow/workflow.py", line 369, in run                                           timing = await self._execute_verb(node, context, callbacks)                                                                                                File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/datashaper/workflow/workflow.py", line 410, in _execute_verb                                 result = node.verb.func(**verb_args)                                                                                                                       File "/root/llm/graphrag-more/graphrag/index/verbs/graph/clustering/cluster_graph.py", line 102, in cluster_graph                                              output_df[[level_to, to]] = pd.DataFrame(                                                                                                                  File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/frame.py", line 4299, in __setitem__                                             self._setitem_array(key, value)                                                                                                                            File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/frame.py", line 4341, in _setitem_array                                          check_key_length(self.columns, key, value)                                                                                                                 File "/root/miniconda3/envs/llm-3.10/lib/python3.10/site-packages/pandas/core/indexers/utils.py", line 390, in check_key_length                                raise ValueError("Columns must be same length as key")                                                                                                   ValueError: Columns must be same length as key                                                                                                               03:20:17,742 graphrag.index.reporting.file_workflow_callbacks INFO Error running pipeline! details=None 

Debugging notes 2

Reference links:
https://www.bilibili.com/video/BV1rE421w7t3/?spm_id_from=333.880.my_history.page.click&vd_source=acc132d7f4c4be1b162c98fd11dbd836
https://github.com/echonoshy/cgft-llm/tree/master/graph-rag

Following these, the local deployment succeeded.
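To confirm the local setup actually serves what graphrag needs, it helps to call Ollama's OpenAI-compatible API directly with the openai client, since that is the interface graphrag's LLM layer uses. A minimal sketch, assuming a default local Ollama, a version recent enough to expose the /v1 embeddings route, and placeholder model names (use whatever you pulled, e.g. ollama pull mistral and ollama pull nomic-embed-text):

from openai import OpenAI

# The api_key is required by the client but ignored by Ollama.
client = OpenAI(base_url="http://localhost:11434/v1", api_key="ollama")

# Chat model used for entity/relationship extraction.
chat = client.chat.completions.create(
    model="mistral",  # placeholder, match your settings.yaml
    messages=[{"role": "user", "content": "Say hello in one word."}],
)
print(chat.choices[0].message.content)

# Embedding model used for the vector store.
emb = client.embeddings.create(model="nomic-embed-text", input=["hello world"])
print(len(emb.data[0].embedding))

If both calls come back, the APIConnectionError from the first run should not reappear, and the empty-graph / cluster_graph failure should disappear with it.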
