update
app.py CHANGED
@@ -297,7 +297,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 POSTER_LATEX_DIR = WORK_DIR / "posterbuilder" / "latex_proj"

 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""

 # ====== Validation: must upload LOGO ======
 if logo_files is None:
@@ -310,7 +310,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 msg = "❌ You must upload at least one institutional logo (multiple allowed)."
 logs.append(msg)
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""
 return

 # Save logos into run-local dir
@@ -324,7 +324,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 saved_logo_paths.append(p)
 logs.append(f"🏷️ Saved {len(saved_logo_paths)} logo file(s) → {LOGO_DIR.relative_to(WORK_DIR)}")
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""

 # ====== Handle uploaded PDF (optional) ======
 pdf_path = None
@@ -339,14 +339,14 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 canonical_pdf = INPUT_DIR / "paper.pdf"
 shutil.copy(pdf_file.name, canonical_pdf)
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""

 # ====== Validate input source ======
 if not arxiv_url and not pdf_file:
 msg = "❌ Please provide either an arXiv link or upload a PDF file (choose one)."
 logs.append(msg)
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""
 return

 # ====== Build command (run INSIDE workspace) ======
@@ -367,7 +367,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 logs.append("\n======= REAL-TIME LOG =======")
 logs.append(f"cwd = runs/{WORK_DIR.name}")
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""

 # ====== Run with REAL-TIME streaming, inside workspace ======
 try:
@@ -384,7 +384,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 msg = f"❌ Pipeline failed to start: {e}"
 logs.append(msg)
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""
 return

 last_yield = time.time()
@@ -398,7 +398,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 except Exception:
 pass
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""
 return

 line = process.stdout.readline()
@@ -409,7 +409,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 now = time.time()
 if now - last_yield >= 0.3:
 last_yield = now
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""
 elif process.poll() is not None:
 break
 else:
@@ -418,18 +418,18 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 return_code = process.wait()
 logs.append(f"\nProcess finished with code {return_code}")
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""

 if return_code != 0:
 logs.append("❌ Process exited with non-zero status. See logs above.")
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""
 return

 except Exception as e:
 logs.append(f"❌ Error during streaming: {e}")
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""
 return
 finally:
 try:
@@ -452,7 +452,7 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 msg = "❌ No output generated. Please check logs above."
 logs.append(msg)
 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""
 return

 # ====== NEW: Post-processing (optional features) ======
@@ -469,11 +469,11 @@ def run_pipeline(arxiv_url, pdf_file, openai_key, logo_files, meeting_logo_file,
 _apply_left_logo(OUTPUT_DIR, logo_files, logs)

 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""


 _write_logs(LOG_PATH, logs)
-yield "\n".join(logs), None
+yield "\n".join(logs), None, ""

 # ====== Zip output (run-local) ======
 try: