Skip to content

Commit 40d1c05

Browse files
committed
Get the async API setup right and activate logging for the API Gateway
1 parent f3da548 commit 40d1c05

File tree

2 files changed

+35
-16
lines changed

2 files changed

+35
-16
lines changed

infra/stack.py

Lines changed: 34 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
aws_iam as iam,
55
aws_apigateway as apigateway,
66
aws_dynamodb as dynamodb,
7+
aws_logs as logs,
78
Duration,
89
Size,
910
aws_secretsmanager as secretsmanager,
@@ -94,21 +95,46 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
9495
proxy=False,
9596
request_parameters={
9697
'integration.request.header.X-Amz-Invocation-Type': "'Event'"
97-
}
98+
},
99+
integration_responses=[
100+
apigateway.IntegrationResponse(
101+
status_code="202",
102+
)
103+
],
98104
)
99105

100106
getter_lambda_integration = apigateway.LambdaIntegration(
101107
getter_lambda,
102108
proxy=True
103109
)
104110

111+
api_log_group = logs.LogGroup(self, "ApiGatewayAccessLogs",
112+
retention=logs.RetentionDays.ONE_MONTH,
113+
removal_policy=RemovalPolicy.DESTROY
114+
)
115+
105116
api = apigateway.RestApi(
106117
self, "BrowserbaseAsyncApi",
107118
rest_api_name="Browserbase Async API",
108119
description="API to trigger Browserbase Lambda asynchronously",
109120
deploy_options=apigateway.StageOptions(
110-
stage_name="v1"
111-
)
121+
stage_name="v1",
122+
access_log_destination=apigateway.LogGroupLogDestination(api_log_group),
123+
access_log_format=apigateway.AccessLogFormat.json_with_standard_fields(
124+
caller=True,
125+
http_method=True,
126+
ip=True,
127+
protocol=True,
128+
request_time=True,
129+
resource_path=True,
130+
response_length=True,
131+
status=True,
132+
user=True,
133+
),
134+
logging_level=apigateway.MethodLoggingLevel.INFO,
135+
data_trace_enabled=True
136+
),
137+
cloud_watch_role=True
112138
)
113139

114140
scrape_request_model = api.add_model("ScrapeRequestModel",
@@ -213,4 +239,9 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
213239
self, "JobStatusTableName",
214240
value=job_table.table_name,
215241
description="DynamoDB table name for job status"
242+
)
243+
CfnOutput(
244+
self, "ApiGatewayAccessLogGroupName",
245+
value=api_log_group.log_group_name,
246+
description="Log Group Name for API Gateway Access Logs"
216247
)

lambdas/scraper/scraper.py

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -206,19 +206,7 @@ def lambda_handler(event, context):
206206
"""
207207
logger.info(f"Lambda handler invoked with event: {json.dumps(event)}")
208208

209-
body = event.get('body')
210-
if not body:
211-
logger.error("Request body is missing or empty.")
212-
return {'status': 'error', 'message': 'Missing request body'}
213-
214-
try:
215-
payload = json.loads(body)
216-
except json.JSONDecodeError:
217-
logger.error(f"Failed to parse JSON body: {body}")
218-
return {'status': 'error', 'message': 'Invalid JSON body'}
219-
220-
result = asyncio.run(scrape_page(payload))
221-
209+
result = asyncio.run(scrape_page(event))
222210
logger.info(f"Lambda handler completed for jobId: {result.get('jobId')}. Scraper status (for logs): {result.get('finalStatus')}")
223211

224212
return {'status': 'accepted', 'jobId': result.get('jobId')}

0 commit comments

Comments (0)