Skip to content
This repository has been archived by the owner on May 28, 2024. It is now read-only.

Commit

Permalink
fixed massive log issue
Browse files Browse the repository at this point in the history
  • Loading branch information
leftmove committed Feb 22, 2024
1 parent 08e071c commit 67e4486
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 14 deletions.
23 changes: 14 additions & 9 deletions routers/filer.py
Original file line number Diff line number Diff line change
Expand Up @@ -319,8 +319,20 @@ async def logs(cik: str, start: int = 0):
for raw_log in log["logs"]:
logs.extend(raw_log.split("\n"))

if calculate_skip:
cursor = database.search_logs(
[
{"$match": {"cik": cik}},
{"$project": {"count": {"$size": ["$logs"]}}},
]
)
result = next(cursor)
start = result["count"]

if filer_status == 2:
return JSONResponse(status_code=200, content={"logs": logs, "time": time})
return JSONResponse(
status_code=200, content={"logs": logs, "time": time, "skip": start}
)

count = len(logs)
log["count"] = count
Expand All @@ -333,13 +345,6 @@ async def logs(cik: str, start: int = 0):
elapsed = datetime.now().timestamp() - log["start"]
remaining = required - elapsed if filer_status <= 3 else 0

if calculate_skip:
cursor = database.search_filers(
[{"$match": {"cik": cik}}, {"$project": {"count": {"$size": "$logs"}}}]
)
result = next(cursor)
start = result["count"]

log["time"]["elapsed"] = elapsed
log["time"]["remaining"] = remaining

Expand All @@ -348,7 +353,7 @@ async def logs(cik: str, start: int = 0):
return {
"logs": logs,
"time": time,
"skipped": start,
"skip": start,
"status": filer_status,
}

Expand Down
10 changes: 5 additions & 5 deletions routers/utils/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,11 @@ def edit_specific_log(query, value):
logs.update_one(query, value)


def search_logs(pipeline):
    """Run a MongoDB aggregation *pipeline* on the logs collection.

    Returns the aggregation cursor so callers can iterate the results
    (e.g. with ``next(...)``) lazily.
    """
    return logs.aggregate(pipeline)


def delete_logs(query):
    """Delete every document in the logs collection matching *query*."""
    logs.delete_many(query)

Expand All @@ -159,11 +164,6 @@ def watch_logs(pipeline):
return cursor


def aggregate_filers(pipeline):
    """Run a MongoDB aggregation *pipeline* on the main filer collection.

    Returns the aggregation cursor for lazy iteration by the caller.
    """
    return main.aggregate(pipeline)


def add_query_log(cik, query):
try:
filer_done = find_filer(cik, {"cik": 1, "name": 1, "_id": 0})
Expand Down

0 comments on commit 67e4486

Please sign in to comment.