error logging
leftmove committed May 3, 2024
1 parent a760766 commit ad30966
Showing 12 changed files with 112 additions and 138 deletions.
11 changes: 1 addition & 10 deletions main.py
@@ -3,7 +3,6 @@
from fastapi.middleware.cors import CORSMiddleware

import uvicorn
import logging
import os
import multiprocessing

@@ -12,10 +11,7 @@
from routers import stocks

from routers.utils import (
# PrometheusMiddleware,
# metrics,
# EndpointFilter,
# setting_otlp,
log_config,
initialize,
)

@@ -33,11 +29,6 @@
CORSMiddleware, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"]
),
]
log = logging.getLogger("uvicorn.access")
log_config = uvicorn.config.LOGGING_CONFIG
log_config["formatters"]["access"]["fmt"] = (
"%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] - %(message)s"
)

app = FastAPI(middleware=middleware)
app.include_router(general.router)
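
The formatter override removed from main.py above is replaced by a log_config imported from routers.utils, which is not expanded in this view. A minimal sketch of what that helper might contain, assuming it simply packages the same uvicorn override that used to live inline in main.py (only the name log_config and the module routers.utils come from the diff; the body is an assumption):

# routers/utils.py (sketch) -- assumed contents, not shown in this commit.
import uvicorn

# Start from uvicorn's default logging configuration and override the access
# formatter so log lines carry file and line context, matching the format
# string deleted from main.py above.
log_config = uvicorn.config.LOGGING_CONFIG
log_config["formatters"]["access"]["fmt"] = (
    "%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] - %(message)s"
)

main.py would presumably pass this dictionary to uvicorn.run(log_config=log_config), but that call is outside the lines shown here.
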
55 changes: 24 additions & 31 deletions routers/filer.py
@@ -1,19 +1,20 @@
from fastapi import BackgroundTasks, HTTPException, APIRouter
from fastapi import HTTPException, APIRouter
from fastapi.responses import FileResponse, JSONResponse
from pydantic import BaseModel

import json
import os
import logging
from urllib import parse
from traceback import format_exc
from datetime import datetime

from . import worker

from .lib import web
from .lib import database
from .lib import analysis

from .lib.errors import report_error
from .lib.search import search_companies
from .lib.api import sec_filer_search
from .lib.cache import cache
@@ -67,13 +68,10 @@ class HTTPError(BaseModel):
# Note: Once two is set, three MUST be cancelled.


def create_recent(cik, company, stamp, backgound: BackgroundTasks = None):
def create_recent(cik, company, stamp):
filer_query = {"cik": cik}
company_name = company["name"]

if backgound:
backgound.add_task(web.estimate_time_newest, cik)

try:
last_report = company["last_report"]
recent_filing = database.find_filing(cik, last_report)
@@ -89,7 +87,7 @@ def create_recent(cik, company, stamp, backgound: BackgroundTasks = None):

database.add_log(cik, "Queried Filer Recent Stocks", company_name, cik)
except Exception as e:
logging.error(e)
report_error(cik, e)
raise HTTPException(status_code=500, detail="Error getting newest stocks.")

try:
@@ -120,20 +118,20 @@ def create_recent(cik, company, stamp, backgound: BackgroundTasks = None):
database.add_log(cik, "Updated Filer Recent Stocks", company_name, cik)
database.edit_status(cik, 2)
except Exception as e:
report_error(cik, e)
database.edit_filer(
{"cik": cik}, {"$set": {"market_value": "NA", "update": False}}
)
database.add_log(cik, "Failed to Update Filer Recent Stocks", company_name, cik)
database.edit_status(cik, 2)
logging.error(e)

start = stamp["start"]
stamp = {"time.elapsed": datetime.now().timestamp() - start}
database.edit_log(cik, stamp)
database.add_query_log(cik, "create-latest")


def create_historical(cik, company, stamp, background=None):
def create_historical(cik, company, stamp):
filer_query = {"cik": cik}
company_name = company["name"]
last_report = company["last_report"]
@@ -148,7 +146,7 @@ def create_historical(cik, company, stamp, background=None):

database.add_log(cik, "Queried Filer Historical Stocks", company_name, cik)
except Exception as e:
logging.error(e)
report_error(cik, e)
database.add_log(
cik, "Failed to Query Filer Historical Stocks", company_name, cik
)
@@ -188,26 +186,26 @@ def create_historical(cik, company, stamp, background=None):
database.add_log(cik, "Updated Filer Historical Stocks", company_name, cik)
database.edit_status(cik, 0)
except Exception as e:
report_error(cik, e)
database.edit_filer(filer_query, {"$set": {"update": False}})
database.add_log(
cik, "Failed to Update Filer Historical Stocks", company_name, cik
)
database.edit_status(cik, 0)
logging.error(e)

start = stamp["start"]
stamp = {"time.elapsed": datetime.now().timestamp() - start, "logs": []}
database.edit_log(cik, stamp)
database.add_query_log(cik, "create-historical")


def create_filer(cik, sec_data, background=None):
def create_filer(cik, sec_data):
company, stamp = web.initalize_filer(cik, sec_data)
create_recent(cik, company, stamp, background)
create_recent(cik, company, stamp)
create_historical(cik, company, stamp)


def update_filer(company, background):
def update_filer(company):
cik = company["cik"]
time = datetime.now().timestamp()

@@ -229,7 +227,7 @@ def update_filer(company, background):
database.edit_filer({"cik": cik}, {"$set": {"last_report": last_report}})

stamp = {"name": company["name"], "start": time}
background.add_task(create_historical, cik, company, stamp)
worker.create_historical.delay(cik, company, stamp)

return {"description": "Filer update started."}

Expand All @@ -239,7 +237,7 @@ def update_filer(company, background):
tags=["filers"],
status_code=201,
)
async def query_filer(cik: str, background: BackgroundTasks):
async def query_filer(cik: str):
filer = database.find_filer(cik)
if not filer:
try:
@@ -248,16 +246,16 @@ async def query_filer(cik: str, background: BackgroundTasks):
logging.error(e)
raise HTTPException(404, detail="CIK not found.")

background.add_task(create_filer, cik, sec_data, background)
worker.create_filer.delay(cik, sec_data)
res = {"description": "Filer creation started."}
else:
res = update_filer(filer, background)
res = update_filer(filer)

return res


@router.get("/rollback", tags=["filers"], status_code=201, include_in_schema=False)
async def rollback_filer(cik: str, password: str, background: BackgroundTasks):
async def rollback_filer(cik: str, password: str):
filer = database.find_filer(cik, {"last_report": 1})
if not filer:
raise HTTPException(404, detail="CIK not found.")
@@ -297,7 +295,7 @@ async def rollback_filer(cik: str, password: str, background: BackgroundTasks):

start = datetime.now().timestamp()
stamp = {"name": filer["name"], "start": start}
background.add_task(create_historical, cik, filer, stamp)
worker.create_historical(cik, filer, stamp)

return {"description": "Filer rollback started."}

@@ -652,22 +650,22 @@ async def popular_ciks():
return {"filers": filers_sorted}


def create_filer_try(cik, background=None):
def create_filer_try(cik):
try:
filer = database.find_filer(cik)
if filer is None:
try:
sec_data = sec_filer_search(cik)
except Exception:
raise HTTPException(status_code=404, detail="CIK not found.")
create_filer(cik, sec_data, background)
create_filer(cik, sec_data)
else:
raise HTTPException(detail="Filer already exists.", status_code=409)
except Exception as e:
logging.info("Error Occured\n", e)
report_error(cik, e)


def create_filer_replace(cik, background=None):
def create_filer_replace(cik):
try:
filer = database.find_filer(cik, {"_id": 1})
if filer:
@@ -676,15 +674,10 @@ def create_filer_replace(cik, background=None):
sec_data = sec_filer_search(cik)
except Exception:
raise HTTPException(status_code=404, detail="CIK not found.")
create_filer(cik, sec_data, background)
create_filer(cik, sec_data)

except Exception as e:
stamp = str(datetime.now())
cwd = os.getcwd()
with open(f"{cwd}/static/errors/error-{stamp}.log", "w") as f:
error_string = f"Failed to Query Filer {cik}\n{repr(e)}\n{format_exc()}"
f.write(error_string)
logging.info("Error Occured\n", e)
report_error(cik, e)


@router.get("/remove", status_code=200, include_in_schema=False)
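
report_error, imported from .lib.errors at the top of routers/filer.py, is the new central error handler; the errors module itself is not expanded in this view. A plausible sketch, reconstructed from the inline handlers it replaces (the block removed from create_filer_replace above, and the create_error helpers removed from routers/general.py and routers/lib/analysis.py below) -- the module path and function name come from the imports, while the body and the log-file naming are assumptions:

# routers/lib/errors.py (sketch) -- assumed, pieced together from the removed handlers.
import os
import logging
from datetime import datetime
from traceback import format_exc


def report_error(cik, e):
    # Log the exception and persist its traceback to a per-error file,
    # as the removed inline handlers did under static/errors/.
    stamp = str(datetime.now())
    cwd = os.getcwd()
    error_string = f"Failed to Query Filer {cik}\n{repr(e)}\n{format_exc()}"

    logging.error(error_string)
    with open(f"{cwd}/static/errors/error-{stamp}.log", "w") as f:
        f.write(error_string)

Because report_error is always called from inside an except block, format_exc() still sees the active exception even though the file write now happens in a separate module.
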
20 changes: 3 additions & 17 deletions routers/general.py
@@ -3,20 +3,15 @@
from fastapi.responses import FileResponse

import os

from traceback import format_exc
from datetime import datetime
import logging

from .lib import database
from .lib import cache as cm

from .lib.backup import save_collections

from .filer import popular_cik_list, top_cik_list
from .worker import try_filer, replace_filer, delay_error

environment = os.environ["ENVIRONMENT"]

cache = cm.cache
router = APIRouter(
tags=["general"],
@@ -58,7 +53,7 @@ async def health():
else:
health_checks.append(True)
except Exception as e:
create_error(cik, e)
logging.error(e)
health_checks.append(False)
continue

@@ -95,8 +90,7 @@ def background_query(query_type, cik_list, query_function):
if found_status <= 0:
query_function(cik)
except Exception as e:
print(e)
create_error(cik, e)
logging.error(e)
continue

cm.set_key_no_expiration(query_type, "stopped")
@@ -128,14 +122,6 @@ async def progressive_restore(password: str):
return {"description": "Started progressive restore of filers."}


def create_error(cik, e):
stamp = str(datetime.now())
cwd = os.getcwd()
with open(f"{cwd}/static/errors/error-general-{stamp}.log", "w") as f:
error_string = f"Failed to Query Filer {cik}\n{repr(e)}\n{format_exc()}"
f.write(error_string)


@router.get("/backup", status_code=201)
async def backup(password: str, background: BackgroundTasks):
if password != os.environ["ADMIN_PASSWORD"]:
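
The BackgroundTasks plumbing removed throughout routers/filer.py is replaced by worker.create_filer.delay(...) and worker.create_historical.delay(...), and routers/general.py now imports try_filer, replace_filer, and delay_error from .worker. The worker module is not expanded in this view; the .delay() call style suggests a Celery task queue, so here is a minimal sketch under that assumption (only the task names appear in the diff -- the app setup, broker URL, and task bodies are illustrative):

# routers/worker.py (sketch) -- assumes a Celery-backed queue; broker URL and
# app wiring are placeholders, only the task names come from this diff.
import os

from celery import Celery

queue = Celery(
    "worker",
    broker=os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0"),
)


@queue.task
def create_filer(cik, sec_data):
    # Imported lazily because routers.filer itself does `from . import worker`.
    from . import filer

    filer.create_filer(cik, sec_data)


@queue.task
def create_historical(cik, company, stamp):
    from . import filer

    filer.create_historical(cik, company, stamp)

Registered this way, worker.create_filer.delay(cik, sec_data) in query_filer enqueues the job and returns immediately, which is the role the removed BackgroundTasks arguments used to play. try_filer, replace_filer, and delay_error would live in the same module but are not visible in this diff.
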
25 changes: 2 additions & 23 deletions routers/lib/analysis.py
@@ -5,13 +5,11 @@
import logging

from datetime import datetime
from traceback import format_exc

from . import database
from . import api
from . import cache

logging.info("[ Analysis Initializing ] ...")
from . import errors


def convert_date(date_str):
@@ -257,14 +255,6 @@ def serialize_local(
}


def create_error(cik, e):
stamp = str(datetime.now())
cwd = os.getcwd()
with open(f"{cwd}/static/errors/error-analysis-{stamp}.log", "w") as f:
error_string = f"Failed to Query Filer {cik}\n{repr(e)}\n{format_exc()}"
f.write(error_string)


def analyze_total(cik, stocks, access_number):
market_values = []
for key in stocks:
@@ -517,8 +507,7 @@ def analyze_stocks(cik, filings):
yield stock_query, log_stock

except Exception as e:
logging.error(e)
create_error(cik, e)
errors.report_error(cik, e)
database.add_log(
cik,
"Error Analyzing Stock for Filings",
@@ -891,13 +880,3 @@ def analyze_aum(cik):
)

return aum_list


def debug_output(content):
now = datetime.now().timestamp()
file_path = f"{cwd}/static/filers/debug-{now}.json"
with open(file_path, "w") as r:
json.dump(content, r, indent=6)


logging.info("[ Analysis Initialized ]")
17 changes: 0 additions & 17 deletions routers/lib/backup.py
@@ -10,7 +10,6 @@


def save_collections():

backup_client = MongoClient(MONGO_SERVER_URL)
collections = ["companies", "filers", "stocks", "statistics"]

@@ -38,22 +37,6 @@ def save_response_content(response, destination):
f.write(chunk)


def get_confirm_token(response):
for key, value in response.cookies.items():
if key.startswith("download_warning"):
return value

return None


def save_response_content(response, destination):
chunk_size = 32768
with open(destination, "wb") as f:
for chunk in response.iter_content(chunk_size):
if chunk: # filter out keep-alive new chunks
f.write(chunk)


def download_drive(file_id, destination):
url = "https://docs.google.com/uc?export=download"
session = requests.Session()
4 changes: 0 additions & 4 deletions routers/lib/cache.py
@@ -17,7 +17,6 @@

REDIS_SERVER_URL = os.environ["REDIS_SERVER_URL"]
REDIS_PORT = int(os.environ.get("REDIS_PORT", 14640))
logging.info("[ Cache (Redis) Initializing ] ...")

store = redis.Redis(
host=REDIS_SERVER_URL,
@@ -129,6 +128,3 @@ async def wrapped(*args, **kwargs):
return wrapped

return wrapper


logging.info("[ Cache (Redis) Initialized ]")