Skip to content

Commit

Permalink
Make logging more verbose and cross-referenceable
Browse files Browse the repository at this point in the history
  • Loading branch information
joshgarde committed May 2, 2024
1 parent 03d0a08 commit d901a37
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 6 deletions.
9 changes: 6 additions & 3 deletions podaac/swodlr_raster_create/preflight.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,9 +70,6 @@ def lambda_handler(event, _context):
# Don't delete orbit files
to_delete = grq_pixc_granules - cmr_pixc_granules

logger.debug('To ingest: %s', to_ingest)
logger.debug('To delete: %s', to_delete)

ingest_jobs = _ingest_granules(to_ingest)
_delete_grq_granules(to_delete)

Expand Down Expand Up @@ -240,6 +237,7 @@ def _find_s3_link(related_urls):
continue

if urlparse(url['url']).scheme.lower() == 's3':
logger.debug('Found S3 URL: %s', url['url'])
return url['url']

logger.warning('No S3 links found')
Expand All @@ -250,6 +248,8 @@ def _ingest_granules(granules):
jobs = []

for granule in granules:
logger.info('Ingesting: %s', granule)

filename = PurePath(urlparse(granule.url).path).name
ingest_job_type.set_input_params(_gen_mozart_job_params(
filename, granule.url
Expand All @@ -270,6 +270,9 @@ def _ingest_granules(granules):


def _delete_grq_granules(granules):
for granule in granules:
logger.info('Deleting: %s', granule)

grq_es_client.delete_by_query(index='grq', body={
'query': {
'ids': {'values': [granule.name for granule in granules]}
Expand Down
5 changes: 5 additions & 0 deletions podaac/swodlr_raster_create/submit_evaluate.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from requests import RequestException

from podaac.swodlr_common.decorators import bulk_job_handler
from podaac.swodlr_common.logging import JobMetadataInjector
from .utilities import utils

STAGE = __name__.rsplit('.', 1)[1]
Expand Down Expand Up @@ -111,6 +112,10 @@ def _process_input(input_):
job_id=job.job_id,
job_status='job-queued'
)

job_logger = JobMetadataInjector(logger, output)
job_logger.info('Job queued on SDS')

return output
# pylint: disable=duplicate-code
except Exception: # pylint: disable=broad-exception-caught
Expand Down
6 changes: 6 additions & 0 deletions podaac/swodlr_raster_create/submit_raster.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from time import sleep

from requests import RequestException
from podaac.swodlr_common.logging import JobMetadataInjector
from podaac.swodlr_common.decorators import job_handler
from podaac.swodlr_common import sds_statuses

Expand All @@ -16,6 +17,7 @@
MAX_ATTEMPTS = int(utils.get_param('sds_submit_max_attempts'))
TIMEOUT = int(utils.get_param('sds_submit_timeout'))

logger = utils.get_logger(__name__)
validate_jobset = utils.load_json_schema('jobset')
raster_job_type = utils.mozart_client.get_job_type(
utils.get_latest_job_version('job-SCIFLO_L2_HR_Raster')
Expand Down Expand Up @@ -91,6 +93,10 @@ def handle_job(eval_job, job_logger, input_params):
job_id=sds_job.job_id,
job_status='job-queued'
)

raster_job_logger = JobMetadataInjector(logger, raster_job)
raster_job_logger.info('Job queued on SDS')

return raster_job
# pylint: disable=duplicate-code
except Exception: # pylint: disable=broad-exception-caught
Expand Down
11 changes: 8 additions & 3 deletions podaac/swodlr_raster_create/wait_for_complete.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,19 +42,24 @@ def handle_jobs(jobset):
job_status = 'job-timedout' # Custom Swodlr status

if job_status in sds_statuses.WAITING:
job_logger.info('Waiting for job')
job_logger.info('Waiting for job; status: %s', job_status)
waiting = True
else:
job_logger.info('Job finished; status: %s', job_status)

job_logger.debug('Pulling metrics out')
metrics = _extract_metrics(job_info)
job_logger.info('SDS metrics: %s', json.dumps(metrics))
job_logger.info('SDS metrics: %s', json.dumps({
'metrics': metrics,
'input': jobset['inputs'][job['product_id']]
}))

job['job_status'] = job_status # Update job in JobSet

if 'traceback' in job_info:
job.update(
traceback=job_info['traceback'],
errors=['SDS threw an error']
errors=['SDS threw an error. Please contact support']
)

if waiting:
Expand Down

0 comments on commit d901a37

Please sign in to comment.