Skip to content
This repository has been archived by the owner on May 28, 2024. It is now read-only.

Commit

Permalink
removed telemetry
Browse files Browse the repository at this point in the history
  • Loading branch information
leftmove committed Mar 13, 2024
1 parent 5c5cd3a commit cd09b0a
Show file tree
Hide file tree
Showing 8 changed files with 126 additions and 153 deletions.
2 changes: 1 addition & 1 deletion Dockerfile.dev
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ COPY ./requirements.txt /app/requirements.txt
COPY ./main.py /app/main.py

COPY ./routers /app/routers
OPY ./static /app/static
COPY ./static /app/static

RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt

Expand Down
1 change: 0 additions & 1 deletion Dockerfile.prod
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ COPY ./routers /app/routers
COPY ./static /app/static

RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
RUN opentelemetry-bootstrap --action=install

EXPOSE 8000

Expand Down
1 change: 0 additions & 1 deletion main.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,6 @@
app.include_router(stocks.router)

if __name__ == "__main__":
initialize()
if production_environment:
uvicorn.run(
"main:app",
Expand Down
6 changes: 0 additions & 6 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,18 +8,12 @@ colorama==0.4.6
Cython==3.0.4
dnspython==2.3.0
fastapi==0.95.1
fastapi-analytics==1.2.0
faust-cchardet==2.1.19
gunicorn==21.2.0
h11==0.14.0
idna==3.4
lxml==4.9.2
meilisearch==0.28.2
opentelemetry-distro==0.43b0
opentelemetry-exporter-otlp==1.22.0
opentelemetry-instrumentation-fastapi==0.43b0
opentelemetry-instrumentation-logging==0.43b0
prometheus-client==0.19.0
pydantic==1.10.7
pymongo==4.6.2
python-dotenv==1.0.0
Expand Down
7 changes: 2 additions & 5 deletions routers/lib/cache.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
from opentelemetry.instrumentation.redis import RedisInstrumentor
import redis

import json
Expand All @@ -10,13 +9,11 @@
from inspect import iscoroutinefunction

REDIS_SERVER_URL = os.environ["REDIS_SERVER_URL"]
REDIS_MASTER_KEY = os.environ["REDIS_MASTER_KEY"]
ENVIRONMENT = os.environ["ENVIRONMENT"]
logging.info("[ Cache (Redis) Initializing ] ...")

if ENVIRONMENT == "production":
RedisInstrumentor().instrument()

r = redis.Redis(host=REDIS_SERVER_URL, port=6379)
r = redis.Redis(host=REDIS_SERVER_URL, port=14640, password=REDIS_MASTER_KEY)


def timing(f):
Expand Down
8 changes: 2 additions & 6 deletions routers/lib/database.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,16 @@
from pymongo import MongoClient
from opentelemetry.instrumentation.pymongo import PymongoInstrumentor
from pymongo.server_api import ServerApi

import os
import logging

from datetime import datetime


ENVIRONMENT = os.environ["ENVIRONMENT"]
MONGO_SERVER_URL = os.environ["MONGO_SERVER_URL"]
logging.info("[ Database (MongoDB) Initializing ] ...")

if ENVIRONMENT == "production":
PymongoInstrumentor().instrument()

client = MongoClient(MONGO_SERVER_URL)
client = MongoClient(MONGO_SERVER_URL, server_api=ServerApi("1"))
db = client["wallstreetlocal"]
main = db["filers"]
stocks = db["stocks"]
Expand Down
2 changes: 1 addition & 1 deletion routers/lib/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import time


MEILI_SERVER_URL = f'http://{os.environ["MEILI_SERVER_URL"]}:7700'
MEILI_SERVER_URL = os.environ["MEILI_SERVER_URL"]
MEILI_MASTER_KEY = os.environ["MEILI_MASTER_KEY"]
logging.info("[ Search (Meilisearch) Initializing ] ...")

Expand Down
252 changes: 120 additions & 132 deletions routers/utils.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,3 @@
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.instrumentation.logging import LoggingInstrumentor
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from prometheus_client import REGISTRY, Counter, Gauge, Histogram
from prometheus_client.openmetrics.exposition import (
CONTENT_TYPE_LATEST,
generate_latest,
)
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import Response
Expand All @@ -29,126 +17,126 @@
from pymongo import MongoClient
from typing import Tuple

# Module-level Prometheus metric objects shared by PrometheusMiddleware below.
# Every metric carries an app_name label so multiple apps can publish into one registry.
INFO = Gauge("fastapi_app_info", "FastAPI application information.", ["app_name"])
# Incremented once per incoming request, before the handler runs.
REQUESTS = Counter(
    "fastapi_requests_total",
    "Total count of requests by method and path.",
    ["method", "path", "app_name"],
)
# Incremented once per finished request (success or failure), with the status code.
RESPONSES = Counter(
    "fastapi_responses_total",
    "Total count of responses by method, path and status codes.",
    ["method", "path", "status_code", "app_name"],
)
# Wall-clock handler latency, observed only on the success path.
REQUESTS_PROCESSING_TIME = Histogram(
    "fastapi_requests_duration_seconds",
    "Histogram of requests processing time by path (in seconds)",
    ["method", "path", "app_name"],
)
# Counts handler exceptions by exception class name.
EXCEPTIONS = Counter(
    "fastapi_exceptions_total",
    "Total count of exceptions raised by path and exception type",
    ["method", "path", "exception_type", "app_name"],
)
# Live gauge of requests currently inside a handler (inc on entry, dec in finally).
REQUESTS_IN_PROGRESS = Gauge(
    "fastapi_requests_in_progress",
    "Gauge of requests by method and path currently being processed",
    ["method", "path", "app_name"],
)


class PrometheusMiddleware(BaseHTTPMiddleware):
    """Starlette middleware that records per-request Prometheus metrics.

    Only requests matching a declared route are measured; unmatched paths
    (404s, arbitrary probes) pass through untouched so they cannot explode
    the label cardinality of the metrics.
    """

    def __init__(self, app: ASGIApp, app_name: str = "fastapi-app") -> None:
        super().__init__(app)
        self.app_name = app_name
        # Using a Gauge as an "info" metric: bumped once at startup so the
        # app_name label is exported.
        INFO.labels(app_name=self.app_name).inc()

    async def dispatch(
        self, request: Request, call_next: RequestResponseEndpoint
    ) -> Response:
        """Measure one request: counts, in-progress gauge, latency, exceptions."""
        method = request.method
        path, is_handled_path = self.get_path(request)

        # Unmatched paths are forwarded without any instrumentation.
        if not is_handled_path:
            return await call_next(request)

        REQUESTS_IN_PROGRESS.labels(
            method=method, path=path, app_name=self.app_name
        ).inc()
        REQUESTS.labels(method=method, path=path, app_name=self.app_name).inc()
        before_time = time.perf_counter()
        try:
            response = await call_next(request)
        except BaseException as e:
            # Handler blew up: record it as a 500 plus an exception-type count,
            # then re-raise. `from None` suppresses implicit exception chaining.
            status_code = HTTP_500_INTERNAL_SERVER_ERROR
            EXCEPTIONS.labels(
                method=method,
                path=path,
                exception_type=type(e).__name__,
                app_name=self.app_name,
            ).inc()
            raise e from None
        else:
            status_code = response.status_code
            after_time = time.perf_counter()
            # retrieve trace id for exemplar
            # NOTE(review): exemplars only appear when the registry is exposed
            # in OpenMetrics format (see metrics() below) — confirm scrape config.
            span = trace.get_current_span()
            trace_id = trace.format_trace_id(span.get_span_context().trace_id)

            REQUESTS_PROCESSING_TIME.labels(
                method=method, path=path, app_name=self.app_name
            ).observe(after_time - before_time, exemplar={"TraceID": trace_id})
        finally:
            # Runs on both success and failure so the response counter and the
            # in-progress gauge never drift.
            RESPONSES.labels(
                method=method,
                path=path,
                status_code=status_code,
                app_name=self.app_name,
            ).inc()
            REQUESTS_IN_PROGRESS.labels(
                method=method, path=path, app_name=self.app_name
            ).dec()

        return response

    @staticmethod
    def get_path(request: Request) -> Tuple[str, bool]:
        """Return (route template, matched?) for a request.

        The route *template* (e.g. "/filers/{cik}") is used instead of the raw
        URL so metric label cardinality stays bounded.
        """
        for route in request.app.routes:
            match, child_scope = route.matches(request.scope)
            if match == Match.FULL:
                return route.path, True

        # No declared route matched: fall back to the raw path, flagged unhandled.
        return request.url.path, False


class EndpointFilter(logging.Filter):
    """Logging filter that silences access-log entries for the metrics endpoint."""

    def filter(self, record: logging.LogRecord) -> bool:
        # Drop any record whose rendered message mentions the Prometheus
        # scrape request; keep everything else.
        return "GET /metrics" not in record.getMessage()


def metrics(request: Request) -> Response:
    """Serve the Prometheus registry as an OpenMetrics scrape response.

    The OpenMetrics exposition format (rather than the classic text format)
    is what allows the exemplars attached by PrometheusMiddleware to be emitted.
    """
    payload = generate_latest(REGISTRY)
    headers = {"Content-Type": CONTENT_TYPE_LATEST}
    return Response(payload, headers=headers)


def setting_otlp(
    app: ASGIApp, app_name: str, endpoint: str, log_correlation: bool = True
) -> None:
    """Configure OpenTelemetry tracing for *app*.

    Installs a global TracerProvider exporting spans over OTLP/gRPC to
    *endpoint*, optionally injects trace IDs into the logging format, and
    auto-instruments the FastAPI app.
    """
    # Both service.name and compose_service carry the app name so traces can
    # be correlated by either attribute.
    service_attributes = {"service.name": app_name, "compose_service": app_name}
    provider = TracerProvider(
        resource=Resource.create(attributes=service_attributes)
    )
    trace.set_tracer_provider(provider)

    # Batch spans before shipping them to the OTLP collector.
    provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter(endpoint=endpoint)))

    if log_correlation:
        # Rewrites the logging format so every record carries trace/span IDs.
        LoggingInstrumentor().instrument(set_logging_format=True)

    FastAPIInstrumentor.instrument_app(app, tracer_provider=provider)
# INFO = Gauge("fastapi_app_info", "FastAPI application information.", ["app_name"])
# REQUESTS = Counter(
# "fastapi_requests_total",
# "Total count of requests by method and path.",
# ["method", "path", "app_name"],
# )
# RESPONSES = Counter(
# "fastapi_responses_total",
# "Total count of responses by method, path and status codes.",
# ["method", "path", "status_code", "app_name"],
# )
# REQUESTS_PROCESSING_TIME = Histogram(
# "fastapi_requests_duration_seconds",
# "Histogram of requests processing time by path (in seconds)",
# ["method", "path", "app_name"],
# )
# EXCEPTIONS = Counter(
# "fastapi_exceptions_total",
# "Total count of exceptions raised by path and exception type",
# ["method", "path", "exception_type", "app_name"],
# )
# REQUESTS_IN_PROGRESS = Gauge(
# "fastapi_requests_in_progress",
# "Gauge of requests by method and path currently being processed",
# ["method", "path", "app_name"],
# )


# class PrometheusMiddleware(BaseHTTPMiddleware):
# def __init__(self, app: ASGIApp, app_name: str = "fastapi-app") -> None:
# super().__init__(app)
# self.app_name = app_name
# INFO.labels(app_name=self.app_name).inc()

# async def dispatch(
# self, request: Request, call_next: RequestResponseEndpoint
# ) -> Response:
# method = request.method
# path, is_handled_path = self.get_path(request)

# if not is_handled_path:
# return await call_next(request)

# REQUESTS_IN_PROGRESS.labels(
# method=method, path=path, app_name=self.app_name
# ).inc()
# REQUESTS.labels(method=method, path=path, app_name=self.app_name).inc()
# before_time = time.perf_counter()
# try:
# response = await call_next(request)
# except BaseException as e:
# status_code = HTTP_500_INTERNAL_SERVER_ERROR
# EXCEPTIONS.labels(
# method=method,
# path=path,
# exception_type=type(e).__name__,
# app_name=self.app_name,
# ).inc()
# raise e from None
# else:
# status_code = response.status_code
# after_time = time.perf_counter()
# # retrieve trace id for exemplar
# span = trace.get_current_span()
# trace_id = trace.format_trace_id(span.get_span_context().trace_id)

# REQUESTS_PROCESSING_TIME.labels(
# method=method, path=path, app_name=self.app_name
# ).observe(after_time - before_time, exemplar={"TraceID": trace_id})
# finally:
# RESPONSES.labels(
# method=method,
# path=path,
# status_code=status_code,
# app_name=self.app_name,
# ).inc()
# REQUESTS_IN_PROGRESS.labels(
# method=method, path=path, app_name=self.app_name
# ).dec()

# return response

# @staticmethod
# def get_path(request: Request) -> Tuple[str, bool]:
# for route in request.app.routes:
# match, child_scope = route.matches(request.scope)
# if match == Match.FULL:
# return route.path, True

# return request.url.path, False


# class EndpointFilter(logging.Filter):
# def filter(self, record: logging.LogRecord) -> bool:
# return record.getMessage().find("GET /metrics") == -1


# def metrics(request: Request) -> Response:
# return Response(
# generate_latest(REGISTRY), headers={"Content-Type": CONTENT_TYPE_LATEST}
# )


# def setting_otlp(
# app: ASGIApp, app_name: str, endpoint: str, log_correlation: bool = True
# ) -> None:

# resource = Resource.create(
# attributes={"service.name": app_name, "compose_service": app_name}
# )

# tracer = TracerProvider(resource=resource)
# trace.set_tracer_provider(tracer)

# tracer.add_span_processor(BatchSpanProcessor(OTLPSpanExporter(endpoint=endpoint)))

# if log_correlation:
# LoggingInstrumentor().instrument(set_logging_format=True)

# FastAPIInstrumentor.instrument_app(app, tracer_provider=tracer)


def download_file_from_google_drive(file_id, destination, chunk_size=32768):
Expand Down

0 comments on commit cd09b0a

Please sign in to comment.