Skip to content
This repository has been archived by the owner on May 28, 2024. It is now read-only.

Commit

Permalink
Fix health-check filer query (await with background=False), pass a pre-built filings map to analyze_timeseries, log exceptions in previously-silent except blocks, and ignore *.pem files
Browse files Browse the repository at this point in the history
  • Loading branch information
leftmove committed May 1, 2024
1 parent b6f86f4 commit 8dee258
Show file tree
Hide file tree
Showing 5 changed files with 23 additions and 14 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -22,5 +22,6 @@ docker-compose.yml
docker-compose.yaml
docker-compose.dev.yml
docker-compose.dev.yaml
*.pem

docker-proj.zip
6 changes: 3 additions & 3 deletions routers/filer.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
from .lib import web
from .lib import database
from .lib import analysis
from .lib import api

from .lib.search import search_companies
from .lib.api import sec_filer_search
Expand Down Expand Up @@ -364,7 +363,7 @@ async def logs(cik: str, start: int = 0):
log["count"] = count
log["logs"] = logs

if log.get("rate_limit") == True:
if log.get("rate_limit"):
raise HTTPException(503, detail="Rate limited, please wait 60 seconds.")

required = time["required"]
Expand Down Expand Up @@ -470,7 +469,8 @@ async def record_csv(cik: str, headers: str = None):
headers = json.loads(headers_string)
header_hash = hash(headers_string)
file_name = f"wallstreetlocal-{cik}-{header_hash}.csv"
except:
except Exception as e:
print(e)
raise HTTPException(
status_code=422, detail="Malformed headers, unable to process request."
)
Expand Down
4 changes: 1 addition & 3 deletions routers/general.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
from fastapi import BackgroundTasks, APIRouter, HTTPException
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from fastapi.concurrency import run_in_threadpool

import os
import time

from traceback import format_exc
from datetime import datetime
Expand Down Expand Up @@ -53,7 +51,7 @@ async def health():
for filer in random_filers:
cik = filer["cik"]
try:
query_filer(cik)
await query_filer(cik, background=False)
health_checks.append(True)
except Exception as e:
create_error(cik, e)
Expand Down
25 changes: 17 additions & 8 deletions routers/lib/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -325,7 +325,6 @@ def analyze_report(local_stock, filings):


def analyze_timeseries(cik, local_stock, global_stock, filings):
filings_map = dict(zip([f["access_number"] for f in filings], filings))
timeseries_global = global_stock.get("timeseries", [])
ticker = global_stock.get("ticker")
cusip = global_stock.get("cusip")
Expand Down Expand Up @@ -364,8 +363,8 @@ def analyze_timeseries(cik, local_stock, global_stock, filings):
sold = local_stock["sold"]
first_appearance = local_stock["first_appearance"]
last_appearance = local_stock["last_appearance"]
buy_time = filings_map[first_appearance]["report_date"]
sold_time = filings_map[last_appearance]["report_date"] if sold else "NA"
buy_time = filings[first_appearance]["report_date"]
sold_time = filings[last_appearance]["report_date"] if sold else "NA"

buy_stamp = {"time": buy_time, "series": "NA"}
sold_stamp = {"time": sold_time, "series": "NA"}
Expand Down Expand Up @@ -397,7 +396,13 @@ def analyze_filings(cik, filings, last_report):
filing_stocks = filings[access_number].get("stocks")
if not filing_stocks:
continue

total_value = analyze_total(cik, filing_stocks, access_number)
database.edit_filing(
{"cik": cik, "access_number": access_number, "form": "13F-HR"},
{"$set": {"market_value": total_value}},
)

for cusip in filing_stocks:
try:
stock_query = access_number
Expand Down Expand Up @@ -449,6 +454,7 @@ def analyze_filings(cik, filings, last_report):
def analyze_stocks(cik, filings):
stock_cache = {}
filings_sorted = sorted(filings, key=lambda d: d["report_date"], reverse=True)
filings_map = dict(zip([f["access_number"] for f in filings], filings))
for filing in filings_sorted:
filing_stocks = filing.get("stocks")
if not filing_stocks:
Expand All @@ -471,7 +477,7 @@ def analyze_stocks(cik, filings):
continue

buy_stamp, sold_stamp = analyze_timeseries(
cik, filing_stock, found_stock, filings
cik, filing_stock, found_stock, filings_map
)
filing_stock["prices"] = {
"buy": buy_stamp,
Expand All @@ -487,7 +493,7 @@ def analyze_stocks(cik, filings):

stock_cache[cusip] = updated_stock

filer_stocks = database.find_filer(cik, {"stocks": 1})["stocks"]
filer_stocks = database.search_filer(cik, {"stocks.cusip": 1})["stocks"]
insert = (
False
if next(filter(lambda s: s["cusip"] == cusip, filer_stocks), None)
Expand Down Expand Up @@ -638,7 +644,8 @@ def create_json(content, filename):
filer_json = json.load(f)
if (datetime.now().timestamp() - filer_json["updated"]) > 60 * 60 * 3:
raise ValueError
except:
except Exception as e:
print(e)
with open(file_path, "w") as r:
json.dump(content, r, indent=6)

Expand Down Expand Up @@ -703,7 +710,8 @@ def create_csv(content, file_name, headers=None):
expire_time = 60 * 60 * 24 * 3
cache.set_key(file_path, "bababooey", expire_time)
raise ValueError
except:
except Exception as e:
print(e)
stock_list = create_dataframe(content, headers)
with open(file_path, "w") as f:
writer = csv.writer(f)
Expand Down Expand Up @@ -775,7 +783,8 @@ def sort_and_format(filer_ciks):
f"${int(market_value):,}" if market_value > 0 else "NA"
)
filer.pop("_id", None)
except:
except Exception as e:
print(e)
filer["date"] = "NA"
filer["market_value"] = "NA"
return filers_sorted
Expand Down
1 change: 1 addition & 0 deletions routers/stocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ async def stock_info(
)
cursor = database.search_filers(pipeline)
except Exception as e:
print(e)
cursor = []
count = 0

Expand Down

0 comments on commit 8dee258

Please sign in to comment.