make logging not use f-str, change others to f-str (#22882)
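The convention applied throughout the diff: `logging` calls pass values as separate arguments with `%s` placeholders, so the message is only formatted when the record is actually emitted and the template stays a constant string, while ordinary string building (cache keys, `click.style` messages) moves from `str.format()` to f-strings. A minimal sketch of the two conventions, for illustration only (the `job_id` value is made up, not a line from the patch):

    import logging

    job_id = "example-job"

    # Logging: pass the value as an argument; formatting is deferred until
    # the record is emitted, and the template remains constant.
    logging.info("Start batch import annotation: %s", job_id)
    # Same rule for logging.exception: constant template, lazy arguments.

    # Everything else: plain string building, where an f-string reads best.
    indexing_cache_key = f"app_annotation_batch_import_{job_id}"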
@@ -22,19 +22,19 @@ def add_document_to_index_task(dataset_document_id: str):

     Usage: add_document_to_index_task.delay(dataset_document_id)
     """
-    logging.info(click.style("Start add document to index: {}".format(dataset_document_id), fg="green"))
+    logging.info(click.style(f"Start add document to index: {dataset_document_id}", fg="green"))
     start_at = time.perf_counter()

     dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document_id).first()
     if not dataset_document:
-        logging.info(click.style("Document not found: {}".format(dataset_document_id), fg="red"))
+        logging.info(click.style(f"Document not found: {dataset_document_id}", fg="red"))
         db.session.close()
         return

     if dataset_document.indexing_status != "completed":
         return

-    indexing_cache_key = "document_{}_indexing".format(dataset_document.id)
+    indexing_cache_key = f"document_{dataset_document.id}_indexing"

     try:
         dataset = dataset_document.dataset
@@ -101,9 +101,7 @@ def add_document_to_index_task(dataset_document_id: str):

         end_at = time.perf_counter()
         logging.info(
-            click.style(
-                "Document added to index: {} latency: {}".format(dataset_document.id, end_at - start_at), fg="green"
-            )
+            click.style(f"Document added to index: {dataset_document.id} latency: {end_at - start_at}", fg="green")
         )
     except Exception as e:
         logging.exception("add document to index failed")
@@ -25,7 +25,7 @@ def add_annotation_to_index_task(

     Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct)
     """
-    logging.info(click.style("Start build index for annotation: {}".format(annotation_id), fg="green"))
+    logging.info(click.style(f"Start build index for annotation: {annotation_id}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -50,7 +50,7 @@ def add_annotation_to_index_task(
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                "Build index successful for annotation: {} latency: {}".format(annotation_id, end_at - start_at),
+                f"Build index successful for annotation: {annotation_id} latency: {end_at - start_at}",
                 fg="green",
             )
         )
@@ -25,9 +25,9 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
|
||||
:param user_id: user_id
|
||||
|
||||
"""
|
||||
logging.info(click.style("Start batch import annotation: {}".format(job_id), fg="green"))
|
||||
logging.info(click.style(f"Start batch import annotation: {job_id}", fg="green"))
|
||||
start_at = time.perf_counter()
|
||||
indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
|
||||
indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}"
|
||||
# get app info
|
||||
app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
|
||||
|
||||
@@ -85,7 +85,7 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
redis_client.setex(indexing_cache_key, 600, "error")
|
||||
indexing_error_msg_key = "app_annotation_batch_import_error_msg_{}".format(str(job_id))
|
||||
indexing_error_msg_key = f"app_annotation_batch_import_error_msg_{str(job_id)}"
|
||||
redis_client.setex(indexing_error_msg_key, 600, str(e))
|
||||
logging.exception("Build index for batch import annotations failed")
|
||||
finally:
|
||||
|
||||
@@ -15,7 +15,7 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
     """
     Async delete annotation index task
     """
-    logging.info(click.style("Start delete app annotation index: {}".format(app_id), fg="green"))
+    logging.info(click.style(f"Start delete app annotation index: {app_id}", fg="green"))
     start_at = time.perf_counter()
     try:
         dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
@@ -35,9 +35,7 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
         except Exception:
             logging.exception("Delete annotation index failed when annotation deleted.")
         end_at = time.perf_counter()
-        logging.info(
-            click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green"))
     except Exception as e:
         logging.exception("Annotation deleted index failed")
     finally:
@@ -16,25 +16,25 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
     """
     Async enable annotation reply task
     """
-    logging.info(click.style("Start delete app annotations index: {}".format(app_id), fg="green"))
+    logging.info(click.style(f"Start delete app annotations index: {app_id}", fg="green"))
     start_at = time.perf_counter()
     # get app info
     app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
     annotations_count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).count()
     if not app:
-        logging.info(click.style("App not found: {}".format(app_id), fg="red"))
+        logging.info(click.style(f"App not found: {app_id}", fg="red"))
         db.session.close()
         return

     app_annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()

     if not app_annotation_setting:
-        logging.info(click.style("App annotation setting not found: {}".format(app_id), fg="red"))
+        logging.info(click.style(f"App annotation setting not found: {app_id}", fg="red"))
         db.session.close()
         return

-    disable_app_annotation_key = "disable_app_annotation_{}".format(str(app_id))
-    disable_app_annotation_job_key = "disable_app_annotation_job_{}".format(str(job_id))
+    disable_app_annotation_key = f"disable_app_annotation_{str(app_id)}"
+    disable_app_annotation_job_key = f"disable_app_annotation_job_{str(job_id)}"

     try:
         dataset = Dataset(
@@ -57,13 +57,11 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
         db.session.commit()

         end_at = time.perf_counter()
-        logging.info(
-            click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green"))
     except Exception as e:
         logging.exception("Annotation batch deleted index failed")
         redis_client.setex(disable_app_annotation_job_key, 600, "error")
-        disable_app_annotation_error_key = "disable_app_annotation_error_{}".format(str(job_id))
+        disable_app_annotation_error_key = f"disable_app_annotation_error_{str(job_id)}"
         redis_client.setex(disable_app_annotation_error_key, 600, str(e))
     finally:
         redis_client.delete(disable_app_annotation_key)
@@ -27,19 +27,19 @@ def enable_annotation_reply_task(
     """
     Async enable annotation reply task
     """
-    logging.info(click.style("Start add app annotation to index: {}".format(app_id), fg="green"))
+    logging.info(click.style(f"Start add app annotation to index: {app_id}", fg="green"))
     start_at = time.perf_counter()
     # get app info
     app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()

     if not app:
-        logging.info(click.style("App not found: {}".format(app_id), fg="red"))
+        logging.info(click.style(f"App not found: {app_id}", fg="red"))
         db.session.close()
         return

     annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).all()
-    enable_app_annotation_key = "enable_app_annotation_{}".format(str(app_id))
-    enable_app_annotation_job_key = "enable_app_annotation_job_{}".format(str(job_id))
+    enable_app_annotation_key = f"enable_app_annotation_{str(app_id)}"
+    enable_app_annotation_job_key = f"enable_app_annotation_job_{str(job_id)}"

     try:
         documents = []
@@ -68,7 +68,7 @@ def enable_annotation_reply_task(
             try:
                 old_vector.delete()
             except Exception as e:
-                logging.info(click.style("Delete annotation index error: {}".format(str(e)), fg="red"))
+                logging.info(click.style(f"Delete annotation index error: {str(e)}", fg="red"))
             annotation_setting.score_threshold = score_threshold
             annotation_setting.collection_binding_id = dataset_collection_binding.id
             annotation_setting.updated_user_id = user_id
@@ -104,18 +104,16 @@ def enable_annotation_reply_task(
         try:
             vector.delete_by_metadata_field("app_id", app_id)
         except Exception as e:
-            logging.info(click.style("Delete annotation index error: {}".format(str(e)), fg="red"))
+            logging.info(click.style(f"Delete annotation index error: {str(e)}", fg="red"))
         vector.create(documents)
         db.session.commit()
         redis_client.setex(enable_app_annotation_job_key, 600, "completed")
         end_at = time.perf_counter()
-        logging.info(
-            click.style("App annotations added to index: {} latency: {}".format(app_id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"App annotations added to index: {app_id} latency: {end_at - start_at}", fg="green"))
     except Exception as e:
         logging.exception("Annotation batch created index failed")
         redis_client.setex(enable_app_annotation_job_key, 600, "error")
-        enable_app_annotation_error_key = "enable_app_annotation_error_{}".format(str(job_id))
+        enable_app_annotation_error_key = f"enable_app_annotation_error_{str(job_id)}"
         redis_client.setex(enable_app_annotation_error_key, 600, str(e))
         db.session.rollback()
     finally:
@@ -25,7 +25,7 @@ def update_annotation_to_index_task(

     Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct)
     """
-    logging.info(click.style("Start update index for annotation: {}".format(annotation_id), fg="green"))
+    logging.info(click.style(f"Start update index for annotation: {annotation_id}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -51,7 +51,7 @@ def update_annotation_to_index_task(
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                "Build index successful for annotation: {} latency: {}".format(annotation_id, end_at - start_at),
+                f"Build index successful for annotation: {annotation_id} latency: {end_at - start_at}",
                 fg="green",
             )
         )
@@ -49,7 +49,8 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
                    except Exception:
                        logging.exception(
                            "Delete image_files failed when storage deleted, \
-                            image_upload_file_is: {}".format(upload_file_id)
+                            image_upload_file_is: %s",
+                            upload_file_id,
                        )
                    db.session.delete(image_file)
                db.session.delete(segment)
@@ -61,14 +62,14 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
                try:
                    storage.delete(file.key)
                except Exception:
-                    logging.exception("Delete file failed when document deleted, file_id: {}".format(file.id))
+                    logging.exception("Delete file failed when document deleted, file_id: %s", file.id)
                db.session.delete(file)
        db.session.commit()

        end_at = time.perf_counter()
        logging.info(
            click.style(
-                "Cleaned documents when documents deleted latency: {}".format(end_at - start_at),
+                f"Cleaned documents when documents deleted latency: {end_at - start_at}",
                fg="green",
            )
        )
@@ -37,10 +37,10 @@ def batch_create_segment_to_index_task(

     Usage: batch_create_segment_to_index_task.delay(job_id, content, dataset_id, document_id, tenant_id, user_id)
     """
-    logging.info(click.style("Start batch create segment jobId: {}".format(job_id), fg="green"))
+    logging.info(click.style(f"Start batch create segment jobId: {job_id}", fg="green"))
     start_at = time.perf_counter()

-    indexing_cache_key = "segment_batch_import_{}".format(job_id)
+    indexing_cache_key = f"segment_batch_import_{job_id}"

     try:
         with Session(db.engine) as session:
@@ -115,7 +115,7 @@ def batch_create_segment_to_index_task(
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                "Segment batch created job: {} latency: {}".format(job_id, end_at - start_at),
+                f"Segment batch created job: {job_id} latency: {end_at - start_at}",
                 fg="green",
             )
         )
@@ -42,7 +42,7 @@ def clean_dataset_task(

     Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct)
     """
-    logging.info(click.style("Start clean dataset when dataset deleted: {}".format(dataset_id), fg="green"))
+    logging.info(click.style(f"Start clean dataset when dataset deleted: {dataset_id}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -57,9 +57,9 @@ def clean_dataset_task(
         segments = db.session.query(DocumentSegment).where(DocumentSegment.dataset_id == dataset_id).all()

         if documents is None or len(documents) == 0:
-            logging.info(click.style("No documents found for dataset: {}".format(dataset_id), fg="green"))
+            logging.info(click.style(f"No documents found for dataset: {dataset_id}", fg="green"))
         else:
-            logging.info(click.style("Cleaning documents for dataset: {}".format(dataset_id), fg="green"))
+            logging.info(click.style(f"Cleaning documents for dataset: {dataset_id}", fg="green"))
             # Specify the index type before initializing the index processor
             if doc_form is None:
                 raise ValueError("Index type must be specified.")
@@ -80,7 +80,8 @@ def clean_dataset_task(
                    except Exception:
                        logging.exception(
                            "Delete image_files failed when storage deleted, \
-                            image_upload_file_is: {}".format(upload_file_id)
+                            image_upload_file_is: %s",
+                            upload_file_id,
                        )
                    db.session.delete(image_file)
                db.session.delete(segment)
@@ -115,9 +116,7 @@ def clean_dataset_task(
         db.session.commit()
         end_at = time.perf_counter()
         logging.info(
-            click.style(
-                "Cleaned dataset when dataset deleted: {} latency: {}".format(dataset_id, end_at - start_at), fg="green"
-            )
+            click.style(f"Cleaned dataset when dataset deleted: {dataset_id} latency: {end_at - start_at}", fg="green")
         )
     except Exception:
         logging.exception("Cleaned dataset when dataset deleted failed")
@@ -24,7 +24,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i

     Usage: clean_document_task.delay(document_id, dataset_id)
     """
-    logging.info(click.style("Start clean document when document deleted: {}".format(document_id), fg="green"))
+    logging.info(click.style(f"Start clean document when document deleted: {document_id}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -51,7 +51,8 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
                    except Exception:
                        logging.exception(
                            "Delete image_files failed when storage deleted, \
-                            image_upload_file_is: {}".format(upload_file_id)
+                            image_upload_file_is: %s",
+                            upload_file_id,
                        )
                    db.session.delete(image_file)
                db.session.delete(segment)
@@ -63,7 +64,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
            try:
                storage.delete(file.key)
            except Exception:
-                logging.exception("Delete file failed when document deleted, file_id: {}".format(file_id))
+                logging.exception("Delete file failed when document deleted, file_id: %s", file_id)
            db.session.delete(file)
            db.session.commit()

@@ -77,7 +78,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                "Cleaned document when document deleted: {} latency: {}".format(document_id, end_at - start_at),
+                f"Cleaned document when document deleted: {document_id} latency: {end_at - start_at}",
                 fg="green",
             )
         )
@@ -19,7 +19,7 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
     Usage: clean_notion_document_task.delay(document_ids, dataset_id)
     """
     logging.info(
-        click.style("Start clean document when import form notion document deleted: {}".format(dataset_id), fg="green")
+        click.style(f"Start clean document when import form notion document deleted: {dataset_id}", fg="green")
     )
     start_at = time.perf_counter()

@@ -21,19 +21,19 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
     :param keywords:
     Usage: create_segment_to_index_task.delay(segment_id)
     """
-    logging.info(click.style("Start create segment to index: {}".format(segment_id), fg="green"))
+    logging.info(click.style(f"Start create segment to index: {segment_id}", fg="green"))
     start_at = time.perf_counter()

     segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
     if not segment:
-        logging.info(click.style("Segment not found: {}".format(segment_id), fg="red"))
+        logging.info(click.style(f"Segment not found: {segment_id}", fg="red"))
         db.session.close()
         return

     if segment.status != "waiting":
         return

-    indexing_cache_key = "segment_{}_indexing".format(segment.id)
+    indexing_cache_key = f"segment_{segment.id}_indexing"

     try:
         # update segment status to indexing
@@ -57,17 +57,17 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
         dataset = segment.dataset

         if not dataset:
-            logging.info(click.style("Segment {} has no dataset, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan"))
             return

         dataset_document = segment.document

         if not dataset_document:
-            logging.info(click.style("Segment {} has no document, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan"))
             return

         if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
-            logging.info(click.style("Segment {} document status is invalid, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan"))
             return

         index_type = dataset.doc_form
@@ -84,9 +84,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
         db.session.commit()

         end_at = time.perf_counter()
-        logging.info(
-            click.style("Segment created to index: {} latency: {}".format(segment.id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Segment created to index: {segment.id} latency: {end_at - start_at}", fg="green"))
     except Exception as e:
         logging.exception("create segment to index failed")
         segment.enabled = False
@@ -20,7 +20,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
     :param action: action
     Usage: deal_dataset_vector_index_task.delay(dataset_id, action)
     """
-    logging.info(click.style("Start deal dataset vector index: {}".format(dataset_id), fg="green"))
+    logging.info(click.style(f"Start deal dataset vector index: {dataset_id}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -162,9 +162,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str):
             index_processor.clean(dataset, None, with_keywords=False, delete_child_chunks=False)

         end_at = time.perf_counter()
-        logging.info(
-            click.style("Deal dataset vector index: {} latency: {}".format(dataset_id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Deal dataset vector index: {dataset_id} latency: {end_at - start_at}", fg="green"))
     except Exception:
         logging.exception("Deal dataset vector index failed")
     finally:
@@ -16,11 +16,11 @@ def delete_account_task(account_id):
     try:
         BillingService.delete_account(account_id)
     except Exception as e:
-        logger.exception(f"Failed to delete account {account_id} from billing service.")
+        logger.exception("Failed to delete account %s from billing service.", account_id)
         raise

     if not account:
-        logger.error(f"Account {account_id} not found.")
+        logger.error("Account %s not found.", account_id)
         return
     # send success email
     send_deletion_success_task.delay(account.email)
@@ -38,7 +38,7 @@ def delete_segment_from_index_task(index_node_ids: list, dataset_id: str, docume
         index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)

         end_at = time.perf_counter()
-        logging.info(click.style("Segment deleted from index latency: {}".format(end_at - start_at), fg="green"))
+        logging.info(click.style(f"Segment deleted from index latency: {end_at - start_at}", fg="green"))
     except Exception:
         logging.exception("delete segment from index failed")
     finally:
@@ -18,37 +18,37 @@ def disable_segment_from_index_task(segment_id: str):

     Usage: disable_segment_from_index_task.delay(segment_id)
     """
-    logging.info(click.style("Start disable segment from index: {}".format(segment_id), fg="green"))
+    logging.info(click.style(f"Start disable segment from index: {segment_id}", fg="green"))
     start_at = time.perf_counter()

     segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
     if not segment:
-        logging.info(click.style("Segment not found: {}".format(segment_id), fg="red"))
+        logging.info(click.style(f"Segment not found: {segment_id}", fg="red"))
         db.session.close()
         return

     if segment.status != "completed":
-        logging.info(click.style("Segment is not completed, disable is not allowed: {}".format(segment_id), fg="red"))
+        logging.info(click.style(f"Segment is not completed, disable is not allowed: {segment_id}", fg="red"))
         db.session.close()
         return

-    indexing_cache_key = "segment_{}_indexing".format(segment.id)
+    indexing_cache_key = f"segment_{segment.id}_indexing"

     try:
         dataset = segment.dataset

         if not dataset:
-            logging.info(click.style("Segment {} has no dataset, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan"))
             return

         dataset_document = segment.document

         if not dataset_document:
-            logging.info(click.style("Segment {} has no document, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan"))
             return

         if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
-            logging.info(click.style("Segment {} document status is invalid, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan"))
             return

         index_type = dataset_document.doc_form
@@ -56,9 +56,7 @@ def disable_segment_from_index_task(segment_id: str):
         index_processor.clean(dataset, [segment.index_node_id])

         end_at = time.perf_counter()
-        logging.info(
-            click.style("Segment removed from index: {} latency: {}".format(segment.id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Segment removed from index: {segment.id} latency: {end_at - start_at}", fg="green"))
     except Exception:
         logging.exception("remove segment from index failed")
         segment.enabled = True
@@ -25,18 +25,18 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen

     dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if not dataset:
-        logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan"))
+        logging.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan"))
         db.session.close()
         return

     dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first()

     if not dataset_document:
-        logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan"))
+        logging.info(click.style(f"Document {document_id} not found, pass.", fg="cyan"))
         db.session.close()
         return
     if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
-        logging.info(click.style("Document {} status is invalid, pass.".format(document_id), fg="cyan"))
+        logging.info(click.style(f"Document {document_id} status is invalid, pass.", fg="cyan"))
         db.session.close()
         return
     # sync index processor
@@ -61,7 +61,7 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
         index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)

         end_at = time.perf_counter()
-        logging.info(click.style("Segments removed from index latency: {}".format(end_at - start_at), fg="green"))
+        logging.info(click.style(f"Segments removed from index latency: {end_at - start_at}", fg="green"))
     except Exception:
         # update segment error msg
         db.session.query(DocumentSegment).where(
@@ -78,6 +78,6 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
         db.session.commit()
     finally:
         for segment in segments:
-            indexing_cache_key = "segment_{}_indexing".format(segment.id)
+            indexing_cache_key = f"segment_{segment.id}_indexing"
             redis_client.delete(indexing_cache_key)
         db.session.close()
@@ -22,13 +22,13 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):

     Usage: document_indexing_sync_task.delay(dataset_id, document_id)
     """
-    logging.info(click.style("Start sync document: {}".format(document_id), fg="green"))
+    logging.info(click.style(f"Start sync document: {document_id}", fg="green"))
     start_at = time.perf_counter()

     document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

     if not document:
-        logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
+        logging.info(click.style(f"Document not found: {document_id}", fg="red"))
         db.session.close()
         return

@@ -108,10 +108,8 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
            indexing_runner = IndexingRunner()
            indexing_runner.run([document])
            end_at = time.perf_counter()
-            logging.info(
-                click.style("update document: {} latency: {}".format(document.id, end_at - start_at), fg="green")
-            )
+            logging.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
        except DocumentIsPausedError as ex:
            logging.info(click.style(str(ex), fg="yellow"))
        except Exception:
-            logging.exception("document_indexing_sync_task failed, document_id: {}".format(document_id))
+            logging.exception("document_indexing_sync_task failed, document_id: %s", document_id)
@@ -26,7 +26,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):

     dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if not dataset:
-        logging.info(click.style("Dataset is not found: {}".format(dataset_id), fg="yellow"))
+        logging.info(click.style(f"Dataset is not found: {dataset_id}", fg="yellow"))
         db.session.close()
         return
     # check document limit
@@ -60,7 +60,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
            return

     for document_id in document_ids:
-        logging.info(click.style("Start process document: {}".format(document_id), fg="green"))
+        logging.info(click.style(f"Start process document: {document_id}", fg="green"))

         document = (
             db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
@@ -77,10 +77,10 @@ def document_indexing_task(dataset_id: str, document_ids: list):
         indexing_runner = IndexingRunner()
         indexing_runner.run(documents)
         end_at = time.perf_counter()
-        logging.info(click.style("Processed dataset: {} latency: {}".format(dataset_id, end_at - start_at), fg="green"))
+        logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        logging.exception("Document indexing task failed, dataset_id: {}".format(dataset_id))
+        logging.exception("Document indexing task failed, dataset_id: %s", dataset_id)
     finally:
         db.session.close()
@@ -20,13 +20,13 @@ def document_indexing_update_task(dataset_id: str, document_id: str):

     Usage: document_indexing_update_task.delay(dataset_id, document_id)
     """
-    logging.info(click.style("Start update document: {}".format(document_id), fg="green"))
+    logging.info(click.style(f"Start update document: {document_id}", fg="green"))
     start_at = time.perf_counter()

     document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

     if not document:
-        logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
+        logging.info(click.style(f"Document not found: {document_id}", fg="red"))
         db.session.close()
         return

@@ -69,10 +69,10 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
         indexing_runner = IndexingRunner()
         indexing_runner.run([document])
         end_at = time.perf_counter()
-        logging.info(click.style("update document: {} latency: {}".format(document.id, end_at - start_at), fg="green"))
+        logging.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        logging.exception("document_indexing_update_task failed, document_id: {}".format(document_id))
+        logging.exception("document_indexing_update_task failed, document_id: %s", document_id)
     finally:
         db.session.close()
@@ -27,7 +27,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):

     dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if dataset is None:
-        logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red"))
+        logging.info(click.style(f"Dataset not found: {dataset_id}", fg="red"))
         db.session.close()
         return

@@ -63,7 +63,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
            db.session.close()

     for document_id in document_ids:
-        logging.info(click.style("Start process document: {}".format(document_id), fg="green"))
+        logging.info(click.style(f"Start process document: {document_id}", fg="green"))

         document = (
             db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
@@ -95,10 +95,10 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
         indexing_runner = IndexingRunner()
         indexing_runner.run(documents)
         end_at = time.perf_counter()
-        logging.info(click.style("Processed dataset: {} latency: {}".format(dataset_id, end_at - start_at), fg="green"))
+        logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        logging.exception("duplicate_document_indexing_task failed, dataset_id: {}".format(dataset_id))
+        logging.exception("duplicate_document_indexing_task failed, dataset_id: %s", dataset_id)
     finally:
         db.session.close()
@@ -21,21 +21,21 @@ def enable_segment_to_index_task(segment_id: str):

     Usage: enable_segment_to_index_task.delay(segment_id)
     """
-    logging.info(click.style("Start enable segment to index: {}".format(segment_id), fg="green"))
+    logging.info(click.style(f"Start enable segment to index: {segment_id}", fg="green"))
     start_at = time.perf_counter()

     segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
     if not segment:
-        logging.info(click.style("Segment not found: {}".format(segment_id), fg="red"))
+        logging.info(click.style(f"Segment not found: {segment_id}", fg="red"))
         db.session.close()
         return

     if segment.status != "completed":
-        logging.info(click.style("Segment is not completed, enable is not allowed: {}".format(segment_id), fg="red"))
+        logging.info(click.style(f"Segment is not completed, enable is not allowed: {segment_id}", fg="red"))
         db.session.close()
         return

-    indexing_cache_key = "segment_{}_indexing".format(segment.id)
+    indexing_cache_key = f"segment_{segment.id}_indexing"

     try:
         document = Document(
@@ -51,17 +51,17 @@ def enable_segment_to_index_task(segment_id: str):
         dataset = segment.dataset

         if not dataset:
-            logging.info(click.style("Segment {} has no dataset, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan"))
             return

         dataset_document = segment.document

         if not dataset_document:
-            logging.info(click.style("Segment {} has no document, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan"))
             return

         if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
-            logging.info(click.style("Segment {} document status is invalid, pass.".format(segment.id), fg="cyan"))
+            logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan"))
             return

         index_processor = IndexProcessorFactory(dataset_document.doc_form).init_index_processor()
@@ -85,9 +85,7 @@ def enable_segment_to_index_task(segment_id: str):
         index_processor.load(dataset, [document])

         end_at = time.perf_counter()
-        logging.info(
-            click.style("Segment enabled to index: {} latency: {}".format(segment.id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Segment enabled to index: {segment.id} latency: {end_at - start_at}", fg="green"))
     except Exception as e:
         logging.exception("enable segment to index failed")
         segment.enabled = False
@@ -27,17 +27,17 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
     start_at = time.perf_counter()
     dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if not dataset:
-        logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan"))
+        logging.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan"))
         return

     dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first()

     if not dataset_document:
-        logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan"))
+        logging.info(click.style(f"Document {document_id} not found, pass.", fg="cyan"))
         db.session.close()
         return
     if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
-        logging.info(click.style("Document {} status is invalid, pass.".format(document_id), fg="cyan"))
+        logging.info(click.style(f"Document {document_id} status is invalid, pass.", fg="cyan"))
         db.session.close()
         return
     # sync index processor
@@ -53,7 +53,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
        .all()
     )
     if not segments:
-        logging.info(click.style("Segments not found: {}".format(segment_ids), fg="cyan"))
+        logging.info(click.style(f"Segments not found: {segment_ids}", fg="cyan"))
         db.session.close()
         return

@@ -91,7 +91,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
         index_processor.load(dataset, documents)

         end_at = time.perf_counter()
-        logging.info(click.style("Segments enabled to index latency: {}".format(end_at - start_at), fg="green"))
+        logging.info(click.style(f"Segments enabled to index latency: {end_at - start_at}", fg="green"))
     except Exception as e:
         logging.exception("enable segments to index failed")
         # update segment error msg
@@ -110,6 +110,6 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
         db.session.commit()
     finally:
         for segment in segments:
-            indexing_cache_key = "segment_{}_indexing".format(segment.id)
+            indexing_cache_key = f"segment_{segment.id}_indexing"
             redis_client.delete(indexing_cache_key)
         db.session.close()
@@ -37,12 +37,10 @@ def send_deletion_success_task(to: str, language: str = "en-US") -> None:

         end_at = time.perf_counter()
         logging.info(
-            click.style(
-                "Send account deletion success email to {}: latency: {}".format(to, end_at - start_at), fg="green"
-            )
+            click.style(f"Send account deletion success email to {to}: latency: {end_at - start_at}", fg="green")
         )
     except Exception:
-        logging.exception("Send account deletion success email to {} failed".format(to))
+        logging.exception("Send account deletion success email to %s failed", to)


 @shared_task(queue="mail")
@@ -83,4 +81,4 @@ def send_account_deletion_verification_code(to: str, code: str, language: str =
             )
         )
     except Exception:
-        logging.exception("Send account deletion verification code email to {} failed".format(to))
+        logging.exception("Send account deletion verification code email to %s failed", to)
@@ -22,7 +22,7 @@ def send_change_mail_task(language: str, to: str, code: str, phase: str) -> None
     if not mail.is_inited():
         return

-    logging.info(click.style("Start change email mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start change email mail to {to}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -35,11 +35,9 @@ def send_change_mail_task(language: str, to: str, code: str, phase: str) -> None
         )

         end_at = time.perf_counter()
-        logging.info(
-            click.style("Send change email mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Send change email mail to {to} succeeded: latency: {end_at - start_at}", fg="green"))
     except Exception:
-        logging.exception("Send change email mail to {} failed".format(to))
+        logging.exception("Send change email mail to %s failed", to)


 @shared_task(queue="mail")
@@ -54,7 +52,7 @@ def send_change_mail_completed_notification_task(language: str, to: str) -> None
     if not mail.is_inited():
         return

-    logging.info(click.style("Start change email completed notify mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start change email completed notify mail to {to}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -72,9 +70,9 @@ def send_change_mail_completed_notification_task(language: str, to: str) -> None
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                "Send change email completed mail to {} succeeded: latency: {}".format(to, end_at - start_at),
+                f"Send change email completed mail to {to} succeeded: latency: {end_at - start_at}",
                 fg="green",
             )
         )
     except Exception:
-        logging.exception("Send change email completed mail to {} failed".format(to))
+        logging.exception("Send change email completed mail to %s failed", to)
@@ -21,7 +21,7 @@ def send_email_code_login_mail_task(language: str, to: str, code: str) -> None:
     if not mail.is_inited():
         return

-    logging.info(click.style("Start email code login mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start email code login mail to {to}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -38,9 +38,7 @@ def send_email_code_login_mail_task(language: str, to: str, code: str) -> None:

         end_at = time.perf_counter()
         logging.info(
-            click.style(
-                "Send email code login mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green"
-            )
+            click.style(f"Send email code login mail to {to} succeeded: latency: {end_at - start_at}", fg="green")
         )
     except Exception:
-        logging.exception("Send email code login mail to {} failed".format(to))
+        logging.exception("Send email code login mail to %s failed", to)
@@ -15,7 +15,7 @@ def send_enterprise_email_task(to: list[str], subject: str, body: str, substitut
     if not mail.is_inited():
         return

-    logging.info(click.style("Start enterprise mail to {} with subject {}".format(to, subject), fg="green"))
+    logging.info(click.style(f"Start enterprise mail to {to} with subject {subject}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -25,8 +25,6 @@ def send_enterprise_email_task(to: list[str], subject: str, body: str, substitut
         email_service.send_raw_email(to=to, subject=subject, html_content=html_content)

         end_at = time.perf_counter()
-        logging.info(
-            click.style("Send enterprise mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Send enterprise mail to {to} succeeded: latency: {end_at - start_at}", fg="green"))
     except Exception:
-        logging.exception("Send enterprise mail to {} failed".format(to))
+        logging.exception("Send enterprise mail to %s failed", to)
@@ -24,9 +24,7 @@ def send_invite_member_mail_task(language: str, to: str, token: str, inviter_nam
     if not mail.is_inited():
         return

-    logging.info(
-        click.style("Start send invite member mail to {} in workspace {}".format(to, workspace_name), fg="green")
-    )
+    logging.info(click.style(f"Start send invite member mail to {to} in workspace {workspace_name}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -46,9 +44,7 @@ def send_invite_member_mail_task(language: str, to: str, token: str, inviter_nam

         end_at = time.perf_counter()
         logging.info(
-            click.style(
-                "Send invite member mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green"
-            )
+            click.style(f"Send invite member mail to {to} succeeded: latency: {end_at - start_at}", fg="green")
         )
     except Exception:
-        logging.exception("Send invite member mail to {} failed".format(to))
+        logging.exception("Send invite member mail to %s failed", to)
@@ -22,7 +22,7 @@ def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspac
     if not mail.is_inited():
         return

-    logging.info(click.style("Start owner transfer confirm mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start owner transfer confirm mail to {to}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -41,12 +41,12 @@ def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspac
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                "Send owner transfer confirm mail to {} succeeded: latency: {}".format(to, end_at - start_at),
+                f"Send owner transfer confirm mail to {to} succeeded: latency: {end_at - start_at}",
                 fg="green",
             )
         )
     except Exception:
-        logging.exception("owner transfer confirm email mail to {} failed".format(to))
+        logging.exception("owner transfer confirm email mail to %s failed", to)


 @shared_task(queue="mail")
@@ -63,7 +63,7 @@ def send_old_owner_transfer_notify_email_task(language: str, to: str, workspace:
     if not mail.is_inited():
         return

-    logging.info(click.style("Start old owner transfer notify mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start old owner transfer notify mail to {to}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -82,12 +82,12 @@ def send_old_owner_transfer_notify_email_task(language: str, to: str, workspace:
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                "Send old owner transfer notify mail to {} succeeded: latency: {}".format(to, end_at - start_at),
+                f"Send old owner transfer notify mail to {to} succeeded: latency: {end_at - start_at}",
                 fg="green",
             )
         )
     except Exception:
-        logging.exception("old owner transfer notify email mail to {} failed".format(to))
+        logging.exception("old owner transfer notify email mail to %s failed", to)


 @shared_task(queue="mail")
@@ -103,7 +103,7 @@ def send_new_owner_transfer_notify_email_task(language: str, to: str, workspace:
     if not mail.is_inited():
         return

-    logging.info(click.style("Start new owner transfer notify mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start new owner transfer notify mail to {to}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -121,9 +121,9 @@ def send_new_owner_transfer_notify_email_task(language: str, to: str, workspace:
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                "Send new owner transfer notify mail to {} succeeded: latency: {}".format(to, end_at - start_at),
+                f"Send new owner transfer notify mail to {to} succeeded: latency: {end_at - start_at}",
                 fg="green",
             )
         )
     except Exception:
-        logging.exception("new owner transfer notify email mail to {} failed".format(to))
+        logging.exception("new owner transfer notify email mail to %s failed", to)
@@ -21,7 +21,7 @@ def send_reset_password_mail_task(language: str, to: str, code: str) -> None:
     if not mail.is_inited():
         return

-    logging.info(click.style("Start password reset mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start password reset mail to {to}", fg="green"))
     start_at = time.perf_counter()

     try:
@@ -38,9 +38,7 @@ def send_reset_password_mail_task(language: str, to: str, code: str) -> None:

         end_at = time.perf_counter()
         logging.info(
-            click.style(
-                "Send password reset mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green"
-            )
+            click.style(f"Send password reset mail to {to} succeeded: latency: {end_at - start_at}", fg="green")
         )
     except Exception:
-        logging.exception("Send password reset mail to {} failed".format(to))
+        logging.exception("Send password reset mail to %s failed", to)
@@ -43,13 +43,11 @@ def process_trace_tasks(file_info):
            if trace_type:
                trace_info = trace_type(**trace_info)
            trace_instance.trace(trace_info)
-            logging.info(f"Processing trace tasks success, app_id: {app_id}")
+            logging.info("Processing trace tasks success, app_id: %s", app_id)
        except Exception as e:
-            logging.info(
-                f"error:\n\n\n{e}\n\n\n\n",
-            )
+            logging.info("error:\n\n\n%s\n\n\n\n", e)
            failed_key = f"{OPS_TRACE_FAILED_KEY}_{app_id}"
            redis_client.incr(failed_key)
-            logging.info(f"Processing trace tasks failed, app_id: {app_id}")
+            logging.info("Processing trace tasks failed, app_id: %s", app_id)
        finally:
            storage.delete(file_path)
@@ -58,7 +58,7 @@ def process_tenant_plugin_autoupgrade_check_task(

        click.echo(
            click.style(
-                "Checking upgradable plugin for tenant: {}".format(tenant_id),
+                f"Checking upgradable plugin for tenant: {tenant_id}",
                fg="green",
            )
        )
@@ -68,7 +68,7 @@ def process_tenant_plugin_autoupgrade_check_task(

        # get plugin_ids to check
        plugin_ids: list[tuple[str, str, str]] = []  # plugin_id, version, unique_identifier
-        click.echo(click.style("Upgrade mode: {}".format(upgrade_mode), fg="green"))
+        click.echo(click.style(f"Upgrade mode: {upgrade_mode}", fg="green"))

        if upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL and include_plugins:
            all_plugins = manager.list_plugins(tenant_id)
@@ -142,7 +142,7 @@ def process_tenant_plugin_autoupgrade_check_task(
                    marketplace.record_install_plugin_event(new_unique_identifier)
                    click.echo(
                        click.style(
-                            "Upgrade plugin: {} -> {}".format(original_unique_identifier, new_unique_identifier),
+                            f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",
                            fg="green",
                        )
                    )
@@ -156,11 +156,11 @@ def process_tenant_plugin_autoupgrade_check_task(
                        },
                    )
                except Exception as e:
-                    click.echo(click.style("Error when upgrading plugin: {}".format(e), fg="red"))
+                    click.echo(click.style(f"Error when upgrading plugin: {e}", fg="red"))
                    traceback.print_exc()
                    break

    except Exception as e:
-        click.echo(click.style("Error when checking upgradable plugin: {}".format(e), fg="red"))
+        click.echo(click.style(f"Error when checking upgradable plugin: {e}", fg="red"))
        traceback.print_exc()
        return
@@ -18,13 +18,13 @@ def recover_document_indexing_task(dataset_id: str, document_id: str):

     Usage: recover_document_indexing_task.delay(dataset_id, document_id)
     """
-    logging.info(click.style("Recover document: {}".format(document_id), fg="green"))
+    logging.info(click.style(f"Recover document: {document_id}", fg="green"))
     start_at = time.perf_counter()

     document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

     if not document:
-        logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
+        logging.info(click.style(f"Document not found: {document_id}", fg="red"))
         db.session.close()
         return

@@ -37,12 +37,10 @@ def recover_document_indexing_task(dataset_id: str, document_id: str):
         elif document.indexing_status == "indexing":
             indexing_runner.run_in_indexing_status(document)
         end_at = time.perf_counter()
-        logging.info(
-            click.style("Processed document: {} latency: {}".format(document.id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Processed document: {document.id} latency: {end_at - start_at}", fg="green"))
     except DocumentIsPausedError as ex:
         logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
-        logging.exception("recover_document_indexing_task failed, document_id: {}".format(document_id))
+        logging.exception("recover_document_indexing_task failed, document_id: %s", document_id)
     finally:
         db.session.close()
@@ -201,7 +201,7 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):
         batch_size=1000,
     )

-    logging.info(f"Deleted {deleted_count} workflow runs for app {app_id}")
+    logging.info("Deleted %s workflow runs for app %s", deleted_count, app_id)


 def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
@@ -215,7 +215,7 @@ def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
         batch_size=1000,
     )

-    logging.info(f"Deleted {deleted_count} workflow node executions for app {app_id}")
+    logging.info("Deleted %s workflow node executions for app %s", deleted_count, app_id)


 def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
@@ -342,6 +342,6 @@ def _delete_records(query_sql: str, params: dict, delete_func: Callable, name: s
            db.session.commit()
            logging.info(click.style(f"Deleted {name} {record_id}", fg="green"))
        except Exception:
-            logging.exception(f"Error occurred while deleting {name} {record_id}")
+            logging.exception("Error occurred while deleting %s %s", name, record_id)
            continue
    rs.close()
@@ -19,21 +19,21 @@ def remove_document_from_index_task(document_id: str):

     Usage: remove_document_from_index.delay(document_id)
     """
-    logging.info(click.style("Start remove document segments from index: {}".format(document_id), fg="green"))
+    logging.info(click.style(f"Start remove document segments from index: {document_id}", fg="green"))
     start_at = time.perf_counter()

     document = db.session.query(Document).where(Document.id == document_id).first()
     if not document:
-        logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
+        logging.info(click.style(f"Document not found: {document_id}", fg="red"))
         db.session.close()
         return

     if document.indexing_status != "completed":
-        logging.info(click.style("Document is not completed, remove is not allowed: {}".format(document_id), fg="red"))
+        logging.info(click.style(f"Document is not completed, remove is not allowed: {document_id}", fg="red"))
         db.session.close()
         return

-    indexing_cache_key = "document_{}_indexing".format(document.id)
+    indexing_cache_key = f"document_{document.id}_indexing"

     try:
         dataset = document.dataset
@@ -49,7 +49,7 @@ def remove_document_from_index_task(document_id: str):
         try:
             index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)
         except Exception:
-            logging.exception(f"clean dataset {dataset.id} from index failed")
+            logging.exception("clean dataset %s from index failed", dataset.id)
         # update segment to disable
         db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).update(
             {
@@ -63,9 +63,7 @@ def remove_document_from_index_task(document_id: str):

         end_at = time.perf_counter()
         logging.info(
-            click.style(
-                "Document removed from index: {} latency: {}".format(document.id, end_at - start_at), fg="green"
-            )
+            click.style(f"Document removed from index: {document.id} latency: {end_at - start_at}", fg="green")
         )
     except Exception:
         logging.exception("remove document from index failed")
@@ -27,12 +27,12 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):

     dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
     if not dataset:
-        logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red"))
+        logging.info(click.style(f"Dataset not found: {dataset_id}", fg="red"))
         db.session.close()
         return
     tenant_id = dataset.tenant_id
     for document_id in document_ids:
-        retry_indexing_cache_key = "document_{}_is_retried".format(document_id)
+        retry_indexing_cache_key = f"document_{document_id}_is_retried"
         # check document limit
         features = FeatureService.get_features(tenant_id)
         try:
@@ -57,12 +57,12 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
                db.session.close()
                return

-        logging.info(click.style("Start retry document: {}".format(document_id), fg="green"))
+        logging.info(click.style(f"Start retry document: {document_id}", fg="green"))
         document = (
             db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
         )
         if not document:
-            logging.info(click.style("Document not found: {}".format(document_id), fg="yellow"))
+            logging.info(click.style(f"Document not found: {document_id}", fg="yellow"))
             db.session.close()
             return
         try:
@@ -95,8 +95,8 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
            db.session.commit()
            logging.info(click.style(str(ex), fg="yellow"))
            redis_client.delete(retry_indexing_cache_key)
-            logging.exception("retry_document_indexing_task failed, document_id: {}".format(document_id))
+            logging.exception("retry_document_indexing_task failed, document_id: %s", document_id)
        finally:
            db.session.close()
     end_at = time.perf_counter()
-    logging.info(click.style("Retry dataset: {} latency: {}".format(dataset_id, end_at - start_at), fg="green"))
+    logging.info(click.style(f"Retry dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
@@ -28,7 +28,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
     if dataset is None:
         raise ValueError("Dataset not found")

-    sync_indexing_cache_key = "document_{}_is_sync".format(document_id)
+    sync_indexing_cache_key = f"document_{document_id}_is_sync"
     # check document limit
     features = FeatureService.get_features(dataset.tenant_id)
     try:
@@ -52,10 +52,10 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         redis_client.delete(sync_indexing_cache_key)
         return

-    logging.info(click.style("Start sync website document: {}".format(document_id), fg="green"))
+    logging.info(click.style(f"Start sync website document: {document_id}", fg="green"))
     document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
     if not document:
-        logging.info(click.style("Document not found: {}".format(document_id), fg="yellow"))
+        logging.info(click.style(f"Document not found: {document_id}", fg="yellow"))
         return
     try:
         # clean old data
@@ -87,6 +87,6 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         db.session.commit()
         logging.info(click.style(str(ex), fg="yellow"))
         redis_client.delete(sync_indexing_cache_key)
-        logging.exception("sync_website_document_indexing_task failed, document_id: {}".format(document_id))
+        logging.exception("sync_website_document_indexing_task failed, document_id: %s", document_id)
     end_at = time.perf_counter()
-    logging.info(click.style("Sync document: {} latency: {}".format(document_id, end_at - start_at), fg="green"))
+    logging.info(click.style(f"Sync document: {document_id} latency: {end_at - start_at}", fg="green"))