Skip to content

Commit

Permalink
Merge pull request #16 from leftmove/development
Browse files Browse the repository at this point in the history
  • Loading branch information
leftmove committed Jun 27, 2024
2 parents 976eb85 + 46e563e commit 8293c55
Show file tree
Hide file tree
Showing 10 changed files with 93 additions and 57 deletions.
18 changes: 15 additions & 3 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,21 @@
"configurations": [
{
"name": "FastAPI: Module",
"type": "debugpy",
"type": "python",
"module": "poetry",
"justMyCode": false,
"args": [
"run",
"uvicorn",
"main:app",
"--host",
"0.0.0.0",
"--port",
"8000",
"--reload"
],
"request": "launch",
"module": "main"
"cwd": "${workspaceFolder}/backend"
},
{
"name": "Celery: Remote Attach",
Expand All @@ -30,7 +42,7 @@
],
"compounds": [
{
"name": "Debug Full",
"name": "Debug Full Backend",
"configurations": ["Celery: Remote Attach", "FastAPI: Module"]
}
]
Expand Down
5 changes: 3 additions & 2 deletions .vscode/tasks.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
{
"label": "startServer",
"type": "shell",
"command": "docker exec cache redis-cli FLUSHALL && python -m debugpy --listen 0.0.0.0:6900 -m celery -A routers.worker worker -l info",
"command": "docker exec cache redis-cli FLUSHALL && poetry run python -m debugpy --listen 0.0.0.0:6900 -m celery -A routers.worker worker -l info",
"isBackground": true,
"problemMatcher": {
"owner": "python",
Expand All @@ -22,7 +22,8 @@
"beginsPattern": ".*",
"endsPattern": ".*"
}
}
},
"options": { "cwd": "${workspaceFolder}/backend" }
},
{
"label": "killProcess",
Expand Down
4 changes: 2 additions & 2 deletions backend/Dockerfile.dev
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@ COPY ./main.py /app/main.py
COPY ./routers /app/routers
COPY ./static /app/static

RUN pip install poetry
RUN pip install poetry
RUN poetry config virtualenvs.create false

COPY pyproject.toml poetry.lock ./
RUN poetry install --no-dev
RUN poetry install --only main

EXPOSE 8000

Expand Down
2 changes: 1 addition & 1 deletion backend/Dockerfile.prod
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ RUN pip install poetry
RUN poetry config virtualenvs.create false

COPY pyproject.toml poetry.lock ./
RUN poetry install --no-dev
RUN poetry install --only main

EXPOSE 8000

Expand Down
33 changes: 32 additions & 1 deletion backend/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 3 additions & 1 deletion backend/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,13 @@ urllib3 = "^2.2.1"
uvicorn = "^0.28.0"
celery = {extras = ["redis"], version = "^5.4.0"}
sentry-sdk = {extras = ["celery"], version = "^2.0.1"}
meilisearch-python-sdk = "^2.9.0"

[tool.poetry.scripts]
dev = "main"

[tool.poetry.group.dev.dependencies]
debugpy = "^1.8.1"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
1 change: 1 addition & 0 deletions backend/routers/general.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ async def info_undefined():
return {"message": "Hello World!"}


@cache(4)
@router.get("/health", status_code=200)
async def health():
health_checks = []
Expand Down
75 changes: 32 additions & 43 deletions backend/routers/lib/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,17 +60,16 @@ def serialize_stock(local_stock, global_stock):
sector = global_stock["sector"] if update else "NA"
industry = global_stock["industry"] if update else "NA"

prices = local_stock.get("prices")

buy_stamp = prices.get("buy")
prices = local_stock.get("prices", {})
buy_stamp = prices.get("buy", {})
buy_timeseries = buy_stamp.get("series")
price_bought = buy_timeseries["close"] if buy_timeseries != "NA" else "NA"
price_bought_str = f"${price_bought}" if buy_timeseries != "NA" else "NA"

price_recent = global_stock["price"] if update else "NA"
price_recent_str = f"${price_recent}" if update else "NA"

sold_stamp = prices.get("sold")
sold_stamp = prices.get("sold", {})
sold_timeseries = sold_stamp.get("series")
price_sold = sold_timeseries["close"] if sold_timeseries != "NA" else "NA"
price_sold_str = f"${price_sold}" if sold_timeseries != "NA" else "NA"
Expand Down Expand Up @@ -181,40 +180,29 @@ def serialize_stock(local_stock, global_stock):
"gain_value_str": gain_value_str,
"gain_percent": gain_percent,
"gain_str": gain_percent_str,
"buy_time": buy_float,
"buy_str": buy_date_str,
"report_time": report_float,
"report_str": report_date_str,
"buy_time": buy_float,
"buy_str": buy_date_str,
"sold_time": sold_float,
"sold_str": sold_date_str,
}


def serialize_local(
local_stock,
global_stock,
sold,
first_appearance,
last_appearance,
filings,
portfolio_percentage,
ownership_percentage,
):
name = local_stock["name"]
cusip = local_stock["cusip"]
update = local_stock["update"]

ticker = local_stock["ticker"]
ticker_str = f"{ticker} (Sold)" if sold and update else ticker
class_str = local_stock["class"]
buy_float = filings[first_appearance]["report_date"]
buy_date = datetime.fromtimestamp(buy_float)
buy_date_str = f"Q{(buy_date.month-1)//3+1} {buy_date.year}"

market_value = local_stock["market_value"]
shares_held = local_stock["shares_held"]
shares_held_str = f"{int(shares_held):,}"
market_value_str = f"${int(market_value):,}"

report_float = filings[first_appearance]["report_date"]
report_date = datetime.fromtimestamp(report_float)
report_date_str = f"Q{(report_date.month-1)//3+1} {report_date.year}"
sold_float = filings[last_appearance]["report_date"] if sold else "NA"
sold_date = datetime.fromtimestamp(sold_float) if sold else "NA"
sold_date_str = f"Q{(sold_date.month-1)//3+1} {sold_date.year}" if sold else "NA"
Expand All @@ -230,28 +218,21 @@ def serialize_local(
else "NA"
)

serialized_global = serialize_stock(local_stock, global_stock)

return {
"name": name,
"cusip": cusip,
"ticker": ticker,
"ticker_str": ticker_str,
"class": class_str,
"shares_held": shares_held,
"shares_held_str": shares_held_str,
"market_value": market_value,
"market_value_str": market_value_str,
**serialized_global,
"sold": sold,
"portfolio_percent": portfolio_percentage,
"portfolio_str": portfolio_percentage_str,
"ownership_percent": ownership_percentage,
"ownership_str": ownership_percentage_str,
"first_appearance": first_appearance,
"last_appearance": last_appearance,
"report_time": report_float,
"report_str": report_date_str,
"buy_time": buy_float,
"buy_str": buy_date_str,
"sold_time": sold_float,
"sold_str": sold_date_str,
"sold": sold,
"update": update,
}


Expand Down Expand Up @@ -382,9 +363,11 @@ def analyze_timeseries(cik, local_stock, global_stock, filings):
def analyze_filings(cik, filings, last_report):
stock_cache = {}
filings_map = dict(zip([f["access_number"] for f in filings], filings))
filings_sorted = sorted([f for f in filings], key=lambda d: d.get("report_date", 0))
for filing in filings:
access_number = filing.get("access_number", "")
filing_stocks = filing.get("stocks")

if not filing_stocks or not access_number:
continue

Expand All @@ -395,20 +378,16 @@ def analyze_filings(cik, filings, last_report):
local_stock = filing_stocks[cusip]
cusip = local_stock["cusip"]

filings_sorted = sorted(
[f for f in filings], key=lambda d: d.get("report_date", 0)
)
first_appearance, last_appearance = analyze_report(
local_stock, filings_sorted
)
sold = False if last_appearance == last_report else True

found_stock = stock_cache.get(cusip)
found_stock = (
database.find_stock("cusip", cusip)
if not found_stock
else found_stock
)
if not found_stock:
found_stock = database.find_stock("cusip", cusip)
stock_cache[cusip] = found_stock

if not found_stock:
continue
is_updated = found_stock.get("update", False)
Expand All @@ -417,9 +396,19 @@ def analyze_filings(cik, filings, last_report):
local_stock, found_stock, total_value
)

# First/last appearance repeatedly updated, could be more efficient
if found_stock.get("prices"):
prices = found_stock["prices"]
else:
buy_stamp, sold_stamp = analyze_timeseries(
cik, local_stock, found_stock, filings_map
)
prices = {"buy": buy_stamp, "sold": sold_stamp}
stock_cache[cusip]["prices"] = prices
local_stock["prices"] = prices

filing_stock = serialize_local(
local_stock,
found_stock,
sold,
first_appearance,
last_appearance,
Expand Down
2 changes: 1 addition & 1 deletion backend/routers/lib/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -447,7 +447,7 @@ def scrape_stocks(cik, data, filing, last_report, empty=False):
updated_stock = updated_stocks[stock_cusip]

updated_stock.pop("_id", None)
new_stock.update(updated_stocks[stock_cusip])
new_stock.update(updated_stock)

filing_stocks[stock_cusip] = new_stock

Expand Down
6 changes: 3 additions & 3 deletions backend/routers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -293,21 +293,21 @@ def insert_search(document_list):
print(e)

print("Calculating Statistics ...")
create_latest = statistics.find_many(
create_latest = statistics.find(
{"type": "create-latest", "completion": {"$exists": True}}
)
results = [result for result in create_latest]
latest_completitions = [result["completion"] for result in results]
latest_total = sum(latest_completitions)
latest_count = len(latest_completitions)
latest_count = len(latest_completitions) if len(latest_completitions) else 1
latest_average = latest_total / latest_count
latest_stat = {
"count": latest_count,
"total": latest_total,
"average": latest_average,
}

create_historical = statistics.find_many(
create_historical = statistics.find(
{"type": "create-historical", "completion": {"$exists": True}}
)
results = [result for result in create_historical]
Expand Down

0 comments on commit 8293c55

Please sign in to comment.