-
Notifications
You must be signed in to change notification settings - Fork 12
Expand file tree
/
Copy path.env.example
More file actions
86 lines (71 loc) · 2.67 KB
/
.env.example
File metadata and controls
86 lines (71 loc) · 2.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
# ============================================
# LLM API Configuration
# ============================================
# Set up your LLM API configuration
# For OpenAI: https://platform.openai.com/docs/api-reference
# For DeepSeek: https://api-docs.deepseek.com/
# For other providers: check their documentation
OPENAI_API_KEY=sk-xxxxxx
OPENAI_BASE_URL=https://api.openai.com/v1
# Device to run AI models on (cpu, cuda:0, cuda:1, etc.)
DEVICE=xxx
# Embedding model; supported options: Qwen/Qwen3-Embedding-0.6B or BAAI/bge-large-zh-v1.5
EMBEDDING_MODEL_NAME=Qwen/Qwen3-Embedding-0.6B
# If you use Azure OpenAI, uncomment the lines below and fill in your deployment details
# API_VERSION=2025-01-01-preview
# OPENAI_PROVIDER=azure
# ============================================
# PostgreSQL Database Configuration
# ============================================
POSTGRES_HOST=localhost
POSTGRES_PORT=5555
POSTGRES_USER=postgres
POSTGRES_PASSWORD=123
POSTGRES_DB=rag_arc
# ============================================
# Redis Cache Configuration
# ============================================
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_DB=0
REDIS_PASSWORD=
# ============================================
# Application Configuration
# ============================================
# JWT Secret Key - Run: openssl rand -hex 32
JWT_SECRET_KEY=your-secret-key-change-this-in-production
# HuggingFace Token (optional, for downloading models)
HF_TOKEN=
# Log Level (DEBUG, INFO, WARNING, ERROR)
LOG_LEVEL=INFO
# ============================================
# File Storage Configuration
# ============================================
# Unified parser output directory (all parsers output to subdirectories here)
# Structure: {PARSER_OUTPUT_DIR}/native/, {PARSER_OUTPUT_DIR}/dots_ocr/, {PARSER_OUTPUT_DIR}/vlm_ocr/
PARSER_OUTPUT_DIR=./data/parsed_files
# Local file storage path
LOCAL_FILE_STORAGE_PATH=./local/files
# ============================================
# Neo4j Graph Database Configuration
# ============================================
# Neo4j connection (auto-configured by Docker deployment)
NEO4J_URL=bolt://localhost:7687
NEO4J_USERNAME=neo4j
NEO4J_PASSWORD=12345678
NEO4J_DATABASE=neo4j
# Docker: Expose Neo4j ports to host (default: false for security)
# Set to true if you want to access Neo4j Browser at http://localhost:7474
EXPOSE_NEO4J=false
# Docker: Neo4j port mapping (only used if EXPOSE_NEO4J=true)
NEO4J_HTTP_PORT=7474
NEO4J_BOLT_PORT=7687
# ============================================
# Optional: MinIO Object Storage
# ============================================
# Uncomment if you want to use MinIO
# MINIO_ENDPOINT=localhost:9000
# MINIO_USERNAME=ROOTNAME
# MINIO_PASSWORD=CHANGEME123
# MINIO_BUCKET=rag-arc-bucket
# MINIO_SECURE=false