Experiment 8 - Determine chunk size
- AWS: read 1 B at every 4096 B, 8192 B, 16384 B, and 32768 B stride, for exactly 1024 single-byte reads
- Azure: read 1 B at every 4096 B and 8192 B stride, for exactly 1024 single-byte reads
- GCR: read 1 B at every 40 KiB, 48 KiB, and 56 KiB stride, for exactly 1024 single-byte reads
ypwong99 committed Feb 16, 2024
1 parent 2bb758b commit e90cca3
Showing 18 changed files with 689 additions and 114 deletions.
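
Every handler in this commit uses the same access pattern: 1024 single-byte reads, one at the start of each chunk, so a given stride touches exactly the first stride × 1024 bytes of the filler file. A minimal sketch of that pattern and of the span each configured stride covers, consolidating the per-file functions below into one parameterized helper:

def read_filler_file(path: str, stride: int, reads: int = 1024) -> None:
    """Touch one byte at the start of each `stride`-byte chunk."""
    with open(path, 'rb') as f:
        for i in range(reads):
            f.seek(i * stride)  # jump to the start of chunk i
            f.read(1)           # fault that chunk's first page in

# Span touched per invocation, for each stride configured in this commit:
for stride in (4096, 8192, 16384, 32768, 40960, 49152, 57344):
    print(f"stride {stride:>6} B -> first {stride * 1024 / 2**20:.0f} MiB of the file")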
359 changes: 250 additions & 109 deletions .github/workflows/chunk-size-experiment.yml

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions .github/workflows/image-size-experiment.yml
@@ -2,9 +2,9 @@ name: Image size experiment

on:
workflow_dispatch:
-  push:
-    branches:
-      - ypwong99/temp-experiment-image-sizes
+  # push:
+  #   branches:
+  #     - ypwong99/temp-experiment-image-sizes

jobs:
build_client:
@@ -3,7 +3,7 @@
"Runtime": "python3.9",
"SubExperiments": [
{
"Title": "cold-100-aws-read-per-16384-bytes",
"Title": "aws-chunk-16-kbytes",
"Function": "hellopy-read-per-16384-bytes",
"Handler": "main.lambda_handler",
"PackageType": "Zip",
@@ -3,7 +3,7 @@
"Runtime": "python3.9",
"SubExperiments": [
{
"Title": "cold-100-aws-read-per-32768-bytes",
"Title": "aws-chunk-32-kbytes",
"Function": "hellopy-read-per-32768-bytes",
"Handler": "main.lambda_handler",
"PackageType": "Zip",
23 changes: 23 additions & 0 deletions experiments/image-size/aws-chunk-4-kbytes.json
@@ -0,0 +1,23 @@
{
  "Provider": "aws",
  "Runtime": "python3.9",
  "SubExperiments": [
    {
      "Title": "aws-chunk-4-kbytes",
      "Function": "hellopy-read-per-4096-bytes",
      "Handler": "main.lambda_handler",
      "PackageType": "Zip",
      "PackagePattern": "main.py",
      "Bursts": 500,
      "BurstSizes": [
        1
      ],
      "IATSeconds": 600,
      "DesiredServiceTimes": [
        "0ms"
      ],
      "Parallelism": 50,
      "FunctionImageSizeMB": 100
    }
  ]
}
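
Rough scale of one such sub-experiment, assuming Bursts × BurstSizes gives the number of invocations and IATSeconds is the gap between bursts (field semantics inferred from the config, not confirmed here):

bursts, burst_size, iat_s = 500, 1, 600  # values from the config above

invocations = bursts * burst_size
duration_h = bursts * iat_s / 3600  # upper bound; Parallelism (50) may shrink wall-clock time

print(f"{invocations} cold-start samples over up to ~{duration_h:.0f} h")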
23 changes: 23 additions & 0 deletions experiments/image-size/aws-chunk-8-kbytes.json
@@ -0,0 +1,23 @@
{
  "Provider": "aws",
  "Runtime": "python3.9",
  "SubExperiments": [
    {
      "Title": "aws-chunk-8-kbytes",
      "Function": "hellopy-read-per-8192-bytes",
      "Handler": "main.lambda_handler",
      "PackageType": "Zip",
      "PackagePattern": "main.py",
      "Bursts": 500,
      "BurstSizes": [
        1
      ],
      "IATSeconds": 600,
      "DesiredServiceTimes": [
        "0ms"
      ],
      "Parallelism": 50,
      "FunctionImageSizeMB": 100
    }
  ]
}
23 changes: 23 additions & 0 deletions experiments/image-size/cold-100-azure-read-per-4096-bytes.json
@@ -0,0 +1,23 @@
{
  "Provider": "azure",
  "Runtime": "python3.8",
  "SubExperiments": [
    {
      "Title": "cold-100-azure-read-per-4096-bytes",
      "Function": "hellopy-read-per-4096-bytes",
      "Handler": "main.main",
      "PackageType": "Zip",
      "PackagePattern": "main.py",
      "Bursts": 500,
      "BurstSizes": [
        1
      ],
      "IATSeconds": 600,
      "DesiredServiceTimes": [
        "0ms"
      ],
      "Parallelism": 50,
      "FunctionImageSizeMB": 100
    }
  ]
}
23 changes: 23 additions & 0 deletions experiments/image-size/cold-100-azure-read-per-8192-bytes.json
@@ -0,0 +1,23 @@
{
  "Provider": "azure",
  "Runtime": "python3.8",
  "SubExperiments": [
    {
      "Title": "cold-100-azure-read-per-8192-bytes",
      "Function": "hellopy-read-per-8192-bytes",
      "Handler": "main.main",
      "PackageType": "Zip",
      "PackagePattern": "main.py",
      "Bursts": 500,
      "BurstSizes": [
        1
      ],
      "IATSeconds": 600,
      "DesiredServiceTimes": [
        "0ms"
      ],
      "Parallelism": 50,
      "FunctionImageSizeMB": 100
    }
  ]
}
23 changes: 23 additions & 0 deletions experiments/image-size/cold-100-gcr-read-per-40960-bytes.json
@@ -0,0 +1,23 @@
{
  "Provider": "gcr",
  "Runtime": "python3.9",
  "SubExperiments": [
    {
      "Title": "cold-100-gcr-read-per-40960-bytes",
      "Function": "hellopy-read-per-40960-bytes",
      "Handler": "Dockerfile",
      "PackageType": "Container",
      "PackagePattern": "lambda_function.py",
      "Bursts": 500,
      "BurstSizes": [
        1
      ],
      "IATSeconds": 900,
      "DesiredServiceTimes": [
        "0ms"
      ],
      "Parallelism": 50,
      "FunctionImageSizeMB": 100
    }
  ]
}
23 changes: 23 additions & 0 deletions experiments/image-size/cold-100-gcr-read-per-57344-bytes.json
@@ -0,0 +1,23 @@
{
  "Provider": "gcr",
  "Runtime": "python3.9",
  "SubExperiments": [
    {
      "Title": "cold-100-gcr-read-per-57344-bytes",
      "Function": "hellopy-read-per-57344-bytes",
      "Handler": "Dockerfile",
      "PackageType": "Container",
      "PackagePattern": "lambda_function.py",
      "Bursts": 500,
      "BurstSizes": [
        1
      ],
      "IATSeconds": 900,
      "DesiredServiceTimes": [
        "0ms"
      ],
      "Parallelism": 50,
      "FunctionImageSizeMB": 100
    }
  ]
}
@@ -0,0 +1,46 @@
import json
import os
import time


def lambda_handler(request, context):
    incr_limit = 0

    if 'queryStringParameters' in request and 'IncrementLimit' in request['queryStringParameters']:
        incr_limit = int(request['queryStringParameters'].get('IncrementLimit', 0))
    elif request.get('body') and json.loads(request['body']).get('IncrementLimit'):
        incr_limit = int(json.loads(request['body'])['IncrementLimit'])

    simulate_work(incr_limit)
    read_filler_file('./filler.file')

    json_region = os.environ.get('AWS_REGION', 'Unknown')

    response = {
        "statusCode": 200,
        "headers": {
            "Content-Type": "application/json"
        },
        "body": json.dumps({
            "Region ": json_region,
            "RequestID": context.aws_request_id,
            "TimestampChain": [str(time.time_ns())]
        }, indent=4)
    }

    return response


def simulate_work(increment):
    # MAXNUM = 6103705
    num = 0
    while num < increment:
        num += 1


def read_filler_file(path: str) -> None:
    # 1024 single-byte reads, one at the start of each 4096-byte chunk.
    with open(path, 'rb') as f:
        for i in range(1024):
            f.seek(i * 4096)
            f.read(1)
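
A quick local smoke test for this handler; the event shape and stub context below are illustrative assumptions, and ./filler.file must exist locally and be at least 4 MiB (1024 reads × 4096 B stride):

import types

fake_context = types.SimpleNamespace(aws_request_id='local-test')  # stand-in for the Lambda context
fake_event = {'queryStringParameters': {'IncrementLimit': '1000'}}

print(lambda_handler(fake_event, fake_context)['statusCode'])  # expect 200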
@@ -0,0 +1,46 @@
import json
import os
import time


def lambda_handler(request, context):
    incr_limit = 0

    if 'queryStringParameters' in request and 'IncrementLimit' in request['queryStringParameters']:
        incr_limit = int(request['queryStringParameters'].get('IncrementLimit', 0))
    elif request.get('body') and json.loads(request['body']).get('IncrementLimit'):
        incr_limit = int(json.loads(request['body'])['IncrementLimit'])

    simulate_work(incr_limit)
    read_filler_file('./filler.file')

    json_region = os.environ.get('AWS_REGION', 'Unknown')

    response = {
        "statusCode": 200,
        "headers": {
            "Content-Type": "application/json"
        },
        "body": json.dumps({
            "Region ": json_region,
            "RequestID": context.aws_request_id,
            "TimestampChain": [str(time.time_ns())]
        }, indent=4)
    }

    return response


def simulate_work(increment):
    # MAXNUM = 6103705
    num = 0
    while num < increment:
        num += 1


def read_filler_file(path: str) -> None:
    # 1024 single-byte reads, one at the start of each 8192-byte chunk.
    with open(path, 'rb') as f:
        for i in range(1024):
            f.seek(i * 8192)
            f.read(1)
@@ -0,0 +1,48 @@
import json
import time

import azure.functions as func


def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    # Prefer the query-string parameter; fall back to the JSON body, then to 0.
    incr_limit = int(req.params.get('IncrementLimit')) if req.params.get('IncrementLimit') else None
    if not incr_limit:
        try:
            req_body = req.get_json()
        except ValueError:
            incr_limit = 0
        else:
            incr_limit = int(req_body.get('IncrementLimit')) if req_body.get('IncrementLimit') else 0

    simulate_work(incr_limit)
    read_filler_file(f"{context.function_directory}/../filler.file")

    return func.HttpResponse(
        body=json.dumps({
            "RequestID": context.invocation_id,
            "TimestampChain": [str(time.time_ns())]
        }, indent=4),
        status_code=200,
        headers={
            "Content-Type": "application/json"
        }
    )


def simulate_work(increment):
    # MAXNUM = 6103705
    num = 0
    while num < increment:
        num += 1


def read_filler_file(path: str) -> None:
    # 1024 single-byte reads, one at the start of each 4096-byte chunk.
    with open(path, 'rb') as f:
        for i in range(1024):
            f.seek(i * 4096)
            f.read(1)
@@ -0,0 +1,48 @@
import json
import time

import azure.functions as func


def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    # Prefer the query-string parameter; fall back to the JSON body, then to 0.
    incr_limit = int(req.params.get('IncrementLimit')) if req.params.get('IncrementLimit') else None
    if not incr_limit:
        try:
            req_body = req.get_json()
        except ValueError:
            incr_limit = 0
        else:
            incr_limit = int(req_body.get('IncrementLimit')) if req_body.get('IncrementLimit') else 0

    simulate_work(incr_limit)
    read_filler_file(f"{context.function_directory}/../filler.file")

    return func.HttpResponse(
        body=json.dumps({
            "RequestID": context.invocation_id,
            "TimestampChain": [str(time.time_ns())]
        }, indent=4),
        status_code=200,
        headers={
            "Content-Type": "application/json"
        }
    )


def simulate_work(increment):
    # MAXNUM = 6103705
    num = 0
    while num < increment:
        num += 1


def read_filler_file(path: str) -> None:
    # 1024 single-byte reads, one at the start of each 8192-byte chunk.
    with open(path, 'rb') as f:
        for i in range(1024):
            f.seek(i * 8192)
            f.read(1)
@@ -0,0 +1,8 @@
FROM python:3.7-alpine

RUN pip install Flask gunicorn

WORKDIR /app
COPY . .

CMD exec gunicorn --bind :$PORT --workers 1 --threads 8 app:app
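
The Cloud Run handler module itself is not part of this excerpt; a minimal app.py matching the Dockerfile's app:app gunicorn target and the 40960-byte stride from the GCR configs could look like this (module layout and route are assumptions):

import json
import time

from flask import Flask

app = Flask(__name__)


def read_filler_file(path: str, stride: int = 40960) -> None:
    # Same pattern as the AWS/Azure handlers: 1024 single-byte reads, one per chunk.
    with open(path, 'rb') as f:
        for i in range(1024):
            f.seek(i * stride)
            f.read(1)


@app.route('/', methods=['GET', 'POST'])
def handler():
    read_filler_file('./filler.file')
    body = json.dumps({"TimestampChain": [str(time.time_ns())]}, indent=4)
    return body, 200, {"Content-Type": "application/json"}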