Skip to content

Commit

Permalink
Disable automatic extraction and deduplication for local models (#1831)
Browse files Browse the repository at this point in the history
* Disable automatic extraction and deduplication for local

* Bump js
  • Loading branch information
NolanTrem authored Jan 15, 2025
1 parent f0b474b commit 7ba5105
Show file tree
Hide file tree
Showing 8 changed files with 14 additions and 2 deletions.
2 changes: 1 addition & 1 deletion js/sdk/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion js/sdk/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "r2r-js",
"version": "0.4.16",
"version": "0.4.17",
"description": "",
"main": "dist/index.js",
"browser": "dist/index.browser.js",
Expand Down
2 changes: 2 additions & 0 deletions py/core/configs/full_lm_studio.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ provider = "postgres"
max_knowledge_relationships = 100
max_description_input_length = 65536
generation_config = { model = "lm_studio/llama-3.2-3b-instruct" } # and other params, model used for relationship extraction
automatic_deduplication = false

[database.graph_enrichment_settings]
community_reports_prompt = "graphrag_community_reports"
Expand Down Expand Up @@ -60,6 +61,7 @@ combine_under_n_chars = 128
overlap = 20
chunks_for_document_summary = 16
document_summary_model = "lm_studio/llama-3.2-3b-instruct"
automatic_extraction = false

[orchestration]
provider = "hatchet"
2 changes: 2 additions & 0 deletions py/core/configs/full_local_llm.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ provider = "postgres"
max_knowledge_relationships = 100
max_description_input_length = 65536
generation_config = { model = "ollama/llama3.1" } # and other params, model used for relationship extraction
automatic_deduplication = false

[database.graph_enrichment_settings]
community_reports_prompt = "graphrag_community_reports"
Expand Down Expand Up @@ -60,6 +61,7 @@ combine_under_n_chars = 128
overlap = 20
chunks_for_document_summary = 16
document_summary_model = "ollama/llama3.1"
automatic_extraction = false

[orchestration]
provider = "hatchet"
2 changes: 2 additions & 0 deletions py/core/configs/full_ollama.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ provider = "postgres"
max_knowledge_relationships = 100
max_description_input_length = 65536
generation_config = { model = "ollama/llama3.1" } # and other params, model used for relationship extraction
automatic_deduplication = false

[database.graph_enrichment_settings]
community_reports_prompt = "graphrag_community_reports"
Expand Down Expand Up @@ -60,6 +61,7 @@ combine_under_n_chars = 128
overlap = 20
chunks_for_document_summary = 16
document_summary_model = "ollama/llama3.1"
automatic_extraction = false

[orchestration]
provider = "hatchet"
2 changes: 2 additions & 0 deletions py/core/configs/lm_studio.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ provider = "postgres"
max_knowledge_relationships = 100
max_description_input_length = 65536
generation_config = { model = "lm_studio/llama-3.2-3b-instruct" } # and other params, model used for relationship extraction
automatic_deduplication = false

[database.graph_enrichment_settings]
community_reports_prompt = "graphrag_community_reports"
Expand All @@ -57,6 +58,7 @@ vision_img_model = "lm_studio/llama3.2-vision"
vision_pdf_model = "lm_studio/llama3.2-vision"
chunks_for_document_summary = 16
document_summary_model = "lm_studio/llama-3.2-3b-instruct"
automatic_extraction = false

[ingestion.extra_parsers]
pdf = "zerox"
2 changes: 2 additions & 0 deletions py/core/configs/local_llm.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ provider = "postgres"
max_knowledge_relationships = 100
max_description_input_length = 65536
generation_config = { model = "ollama/llama3.1" } # and other params, model used for relationship extraction
automatic_deduplication = false

[database.graph_enrichment_settings]
community_reports_prompt = "graphrag_community_reports"
Expand All @@ -57,6 +58,7 @@ vision_img_model = "ollama/llama3.2-vision"
vision_pdf_model = "ollama/llama3.2-vision"
chunks_for_document_summary = 16
document_summary_model = "ollama/llama3.1"
automatic_extraction = false

[ingestion.extra_parsers]
pdf = "zerox"
2 changes: 2 additions & 0 deletions py/core/configs/ollama.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ provider = "postgres"
max_knowledge_relationships = 100
max_description_input_length = 65536
generation_config = { model = "ollama/llama3.1" } # and other params, model used for relationship extraction
automatic_deduplication = false

[database.graph_enrichment_settings]
community_reports_prompt = "graphrag_community_reports"
Expand All @@ -57,6 +58,7 @@ vision_img_model = "ollama/llama3.2-vision"
vision_pdf_model = "ollama/llama3.2-vision"
chunks_for_document_summary = 16
document_summary_model = "ollama/llama3.1"
automatic_extraction = false

[ingestion.extra_parsers]
pdf = "zerox"

0 comments on commit 7ba5105

Please sign in to comment.