IFMedTechdemo committed on
Commit c1e1bba · verified · 1 Parent(s): da048ad

Update app.py

Files changed (1): app.py +0 -53
app.py CHANGED
@@ -96,45 +96,6 @@ except Exception as e:
     processor_x = None
     print(f"✗ Nanonets-OCR2-3B: Failed to load - {str(e)}")

-
-
-
-# Load Dots.OCR - UPDATED with snapshot_download and device_map="auto"
-try:
-    MODEL_ID_D = "rednote-hilab/dots.ocr"
-    model_path_d = os.path.join(CACHE_DIR, "dots-ocr-local")
-
-    # Download and cache model locally
-    snapshot_download(
-        repo_id=MODEL_ID_D,
-        local_dir=model_path_d,
-        local_dir_use_symlinks=False, # Avoid symlink issues on HF Spaces
-        allow_patterns=["*.json", "*.bin", "*.safetensors", "*.txt"]
-    )
-
-    processor_d = AutoProcessor.from_pretrained(
-        model_path_d,
-        trust_remote_code=True
-    )
-
-    model_d = AutoModelForCausalLM.from_pretrained(
-        model_path_d,
-        attn_implementation="flash_attention_2",
-        torch_dtype=torch.bfloat16,
-        device_map="auto", # Better memory management
-        trust_remote_code=True
-    ).eval()
-    print("✓ Dots.OCR loaded")
-except Exception as e:
-    model_d = None
-    processor_d = None
-    print(f"✗ Dots.OCR: Failed to load - {str(e)}")
-    import traceback
-    traceback.print_exc()
-
-
-
-
 # Load olmOCR-2-7B-1025
 try:
     MODEL_ID_M = "allenai/olmOCR-2-7B-1025"
@@ -198,12 +159,6 @@ def generate_image(model_name: str, text: str, image: Image.Image,
             return
         processor = processor_v
         model = model_v
-    elif model_name == "Dots.OCR":
-        if model_d is None:
-            yield "Dots.OCR is not available.", "Dots.OCR is not available."
-            return
-        processor = processor_d
-        model = model_d
     else:
         yield "Invalid model selected.", "Invalid model selected."
         return
@@ -216,8 +171,6 @@ def generate_image(model_name: str, text: str, image: Image.Image,
         return


-
-
     try:
         # Prepare messages in chat format
         messages = [{
@@ -319,11 +272,6 @@ if __name__ == "__main__":
     if model_v is not None:
         available_models.append("Chandra-OCR")
         print(" Added: Chandra-OCR")
-    if model_d is not None:
-        available_models.append("Dots.OCR")
-        print(" Added: Dots.OCR")
-
-
     if not available_models:
         print("ERROR: No models were loaded successfully!")
         exit(1)
@@ -403,7 +351,6 @@ if __name__ == "__main__":
     - **olmOCR-2-7B-1025**: Allen AI's OCR model
     - **Nanonets-OCR2-3B**: Nanonets OCR model
     - **Chandra-OCR**: Datalab OCR model
-    - **Dots.OCR**: Stranger Vision OCR model (Updated)
     """)


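The removed loader followed the same load-or-disable pattern app.py keeps for the remaining backends: attempt the load inside try/except, fall back to None on failure, and only append models that actually loaded to available_models. Below is a minimal sketch of that pattern for reference; the helper name load_optional_model and the registry dict are illustrative and not part of app.py, and only the allenai/olmOCR-2-7B-1025 repo id comes from this diff.

```python
# Sketch of the load-or-disable pattern used in app.py (illustrative names, not the app's API).
import torch
from transformers import AutoModelForCausalLM, AutoProcessor


def load_optional_model(repo_id: str):
    """Try to load a model/processor pair; return (None, None) if anything fails."""
    try:
        processor = AutoProcessor.from_pretrained(repo_id, trust_remote_code=True)
        model = AutoModelForCausalLM.from_pretrained(
            repo_id,
            torch_dtype=torch.bfloat16,
            device_map="auto",  # spread weights across available devices
            trust_remote_code=True,
        ).eval()
        print(f"✓ {repo_id} loaded")
        return model, processor
    except Exception as e:  # broad on purpose: one missing model must not crash the app
        print(f"✗ {repo_id}: Failed to load - {e}")
        return None, None


# Only backends that loaded successfully are offered in the UI dropdown;
# other repo ids would be registered here the same way.
registry = {"olmOCR-2-7B-1025": load_optional_model("allenai/olmOCR-2-7B-1025")}
available_models = [name for name, (model, _) in registry.items() if model is not None]
```

Because Dots.OCR is dropped both from this registration step and from the generate_image dispatch, the Gradio UI simply never lists it and no other call sites need to change.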