Experiment 16 - Chunk size
ypwong99 committed on Feb 26, 2024 (commit c4abc10, parent 5605925)
Showing 7 changed files with 268 additions and 61 deletions.
122 changes: 61 additions & 61 deletions .github/workflows/chunk-size-experiment.yml
@@ -39,79 +39,32 @@ jobs:
           path: ${{ env.working-directory }}/build.tar
           retention-days: 1

-#  aws-chunk:
-#    needs: build_client
-#    runs-on: [ self-hosted, aws ]
-#    strategy:
-#      fail-fast: false
-#      matrix:
-#        name: [ aws-chunk-2-kbytes, aws-chunk-12-kbytes, aws-chunk-24-kbytes ]
-#    env:
-#      working-directory: src
-#      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
-#      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
-#
-#    steps:
-#      - name: Check out code into the Go module directory
-#        uses: actions/checkout@v4
-#        with:
-#          ref: ypwong99/temp-experiment-image-sizes
-#
-#      - name: Configure AWS credentials using EASE lab account
-#        uses: aws-actions/configure-aws-credentials@v4
-#        with:
-#          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
-#          aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
-#          aws-region: us-west-1
-#
-#      - name: Set up Node.js 16.16.0
-#        uses: actions/setup-node@v3
-#        with:
-#          node-version: 16.16.0
-#
-#      - name: Download client artifact
-#        uses: actions/download-artifact@v3
-#        with:
-#          name: STeLLAR-build
-#
-#      - name: Untar client build
-#        working-directory: ${{ env.working-directory }}
-#        run: tar --strip-components=1 -xvf ../build.tar -C .
-#
-#      - name: Run experiment ${{ matrix.name }}
-#        working-directory: ${{ env.working-directory }}
-#        env:
-#          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
-#          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
-#        run: ./stellar -a 356764711652 -o latency-samples -c ../experiments/chunk/${{ matrix.name }}.json
-#
-#      - uses: actions/upload-artifact@v3
-#        with:
-#          name: ${{ matrix.name }}
-#          path: ${{ env.working-directory }}/latency-samples
-
-  azure-chunk:
+  aws-chunk:
     needs: build_client
-    runs-on: [ self-hosted, azure ]
-    timeout-minutes: 600
+    runs-on: [ self-hosted, aws ]
     strategy:
       fail-fast: false
       matrix:
-        name: [ azure-chunk-1-kbytes, azure-chunk-6-kbytes ]
+        name: [ aws-chunk-1-kbytes, aws-chunk-6-kbytes, aws-chunk-20-kbytes ]
     env:
       working-directory: src
-      AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
-      AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
-      AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
-      AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}

     steps:
       - name: Check out code into the Go module directory
         uses: actions/checkout@v4
         with:
           ref: ypwong99/temp-experiment-image-sizes

-      - name: Set up Node 16.16.0
+      - name: Configure AWS credentials using EASE lab account
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
+          aws-region: us-west-1
+
+      - name: Set up Node.js 16.16.0
         uses: actions/setup-node@v3
         with:
           node-version: 16.16.0
@@ -127,13 +80,60 @@ jobs:

       - name: Run experiment ${{ matrix.name }}
         working-directory: ${{ env.working-directory }}
-        run: ./stellar -c ../experiments/chunk/${{ matrix.name }}.json
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
+        run: ./stellar -a 356764711652 -o latency-samples -c ../experiments/chunk/${{ matrix.name }}.json

       - uses: actions/upload-artifact@v3
         with:
           name: ${{ matrix.name }}
           path: ${{ env.working-directory }}/latency-samples

+#  azure-chunk:
+#    needs: build_client
+#    runs-on: [ self-hosted, azure ]
+#    timeout-minutes: 600
+#    strategy:
+#      fail-fast: false
+#      matrix:
+#        name: [ azure-chunk-1-kbytes, azure-chunk-6-kbytes ]
+#    env:
+#      working-directory: src
+#      AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
+#      AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
+#      AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
+#      AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
+#
+#    steps:
+#      - name: Check out code into the Go module directory
+#        uses: actions/checkout@v4
+#        with:
+#          ref: ypwong99/temp-experiment-image-sizes
+#
+#      - name: Set up Node 16.16.0
+#        uses: actions/setup-node@v3
+#        with:
+#          node-version: 16.16.0
+#
+#      - name: Download client artifact
+#        uses: actions/download-artifact@v3
+#        with:
+#          name: STeLLAR-build
+#
+#      - name: Untar client build
+#        working-directory: ${{ env.working-directory }}
+#        run: tar --strip-components=1 -xvf ../build.tar -C .
+#
+#      - name: Run experiment ${{ matrix.name }}
+#        working-directory: ${{ env.working-directory }}
+#        run: ./stellar -c ../experiments/chunk/${{ matrix.name }}.json
+#
+#      - uses: actions/upload-artifact@v3
+#        with:
+#          name: ${{ matrix.name }}
+#          path: ${{ env.working-directory }}/latency-samples

 #  gcr-chunk:
 #    needs: build_client
 #    runs-on: [ self-hosted, gcr ]
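For anyone reproducing the re-enabled aws-chunk job outside of GitHub Actions, the sketch below mirrors what the matrix does: one ./stellar invocation per chunk-size config, run from the src working directory with AWS credentials taken from the environment. The account ID, output directory, and config paths are copied from the workflow above; the Python driver itself is an assumption for local use, not something the commit adds.

```python
# Minimal local stand-in for the aws-chunk matrix job (assumption: the STeLLAR
# client has already been built into ./stellar inside the src/ directory).
import os
import subprocess

CONFIGS = [
    "aws-chunk-1-kbytes",
    "aws-chunk-6-kbytes",
    "aws-chunk-20-kbytes",
]

def run_chunk_experiments(src_dir: str = "src") -> None:
    # The workflow injects these from repository secrets; locally,
    # AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must already be exported.
    env = dict(os.environ)
    for name in CONFIGS:
        subprocess.run(
            [
                "./stellar",
                "-a", "356764711652",                       # AWS account ID from the workflow
                "-o", "latency-samples",                     # output directory for latency samples
                "-c", f"../experiments/chunk/{name}.json",   # per-chunk-size experiment config
            ],
            cwd=src_dir,
            env=env,
            check=True,
        )

if __name__ == "__main__":
    run_chunk_experiments()
```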
23 changes: 23 additions & 0 deletions experiments/chunk/aws-chunk-1-kbytes.json
@@ -0,0 +1,23 @@
{
"Provider": "aws",
"Runtime": "python3.9",
"SubExperiments": [
{
"Title": "aws-chunk-1-kbytes",
"Function": "hellopy-read-per-1024-bytes",
"Handler": "main.lambda_handler",
"PackageType": "Zip",
"PackagePattern": "main.py",
"Bursts": 500,
"BurstSizes": [
1
],
"IATSeconds": 600,
"DesiredServiceTimes": [
"0ms"
],
"Parallelism": 50,
"FunctionImageSizeMB": 100
}
]
}
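The three new configs differ only in chunk size: the N in aws-chunk-N-kbytes selects a pre-deployed function whose name encodes the read stride in bytes (1 KiB maps to hellopy-read-per-1024-bytes, and so on). Each sub-experiment fires 500 bursts of a single request with a 600 s inter-arrival time, so successive bursts are spaced far enough apart that the function is likely cold each time. The helper below only restates that naming arithmetic; it is an illustrative sketch, not code from the repository.

```python
# Illustrative sketch (not part of the commit): derive the function name and
# invocation count implied by a chunk-size config like the one above.
def describe_chunk_config(chunk_kbytes: int, bursts: int = 500, burst_size: int = 1) -> dict:
    stride_bytes = chunk_kbytes * 1024
    return {
        "title": f"aws-chunk-{chunk_kbytes}-kbytes",
        "function": f"hellopy-read-per-{stride_bytes}-bytes",
        "total_invocations": bursts * burst_size,   # 500 x 1 = 500 invocations per sub-experiment
    }

print(describe_chunk_config(1))    # matches aws-chunk-1-kbytes.json
print(describe_chunk_config(6))    # matches aws-chunk-6-kbytes.json
print(describe_chunk_config(20))   # matches aws-chunk-20-kbytes.json
```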
23 changes: 23 additions & 0 deletions experiments/chunk/aws-chunk-20-kbytes.json
@@ -0,0 +1,23 @@
{
"Provider": "aws",
"Runtime": "python3.9",
"SubExperiments": [
{
"Title": "aws-chunk-20-kbytes",
"Function": "hellopy-read-per-20480-bytes",
"Handler": "main.lambda_handler",
"PackageType": "Zip",
"PackagePattern": "main.py",
"Bursts": 500,
"BurstSizes": [
1
],
"IATSeconds": 600,
"DesiredServiceTimes": [
"0ms"
],
"Parallelism": 50,
"FunctionImageSizeMB": 100
}
]
}
23 changes: 23 additions & 0 deletions experiments/chunk/aws-chunk-6-kbytes.json
@@ -0,0 +1,23 @@
{
"Provider": "aws",
"Runtime": "python3.9",
"SubExperiments": [
{
"Title": "aws-chunk-6-kbytes",
"Function": "hellopy-read-per-6144-bytes",
"Handler": "main.lambda_handler",
"PackageType": "Zip",
"PackagePattern": "main.py",
"Bursts": 500,
"BurstSizes": [
1
],
"IATSeconds": 600,
"DesiredServiceTimes": [
"0ms"
],
"Parallelism": 50,
"FunctionImageSizeMB": 100
}
]
}
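Since the three configs are identical apart from the chunk size, they could also be generated from a single template. The generator below is a hypothetical convenience script, not something the commit adds; the field names and values are copied from the JSON files above.

```python
# Hypothetical generator for the experiments/chunk/aws-chunk-*-kbytes.json
# files shown above; only the chunk size varies between them.
import json
from pathlib import Path

def write_chunk_config(chunk_kbytes: int, out_dir: str = "experiments/chunk") -> Path:
    config = {
        "Provider": "aws",
        "Runtime": "python3.9",
        "SubExperiments": [
            {
                "Title": f"aws-chunk-{chunk_kbytes}-kbytes",
                "Function": f"hellopy-read-per-{chunk_kbytes * 1024}-bytes",
                "Handler": "main.lambda_handler",
                "PackageType": "Zip",
                "PackagePattern": "main.py",
                "Bursts": 500,
                "BurstSizes": [1],
                "IATSeconds": 600,
                "DesiredServiceTimes": ["0ms"],
                "Parallelism": 50,
                "FunctionImageSizeMB": 100,
            }
        ],
    }
    path = Path(out_dir) / f"aws-chunk-{chunk_kbytes}-kbytes.json"
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(config, indent=2) + "\n")
    return path

for kb in (1, 6, 20):
    write_chunk_config(kb)
```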
46 changes: 46 additions & 0 deletions (new Lambda handler, 1,024-byte-stride variant)
@@ -0,0 +1,46 @@
import json
import os
import time
import random


def lambda_handler(request, context):
incr_limit = 0

if 'queryStringParameters' in request and 'IncrementLimit' in request['queryStringParameters']:
incr_limit = int(request['queryStringParameters'].get('IncrementLimit', 0))
elif 'body' in request and json.loads(request['body'])['IncrementLimit']:
incr_limit = int(json.loads(request['body'])['IncrementLimit'])

simulate_work(incr_limit)
read_filler_file('./filler.file')

json_region = os.environ.get('AWS_REGION', 'Unknown')

response = {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": json.dumps({
"Region ": json_region,
"RequestID": context.aws_request_id,
"TimestampChain": [str(time.time_ns())]
}, indent=4)
}

return response


def simulate_work(increment):
# MAXNUM = 6103705
num = 0
while num < increment:
num += 1


def read_filler_file(path: str) -> None:
with open(path, 'rb') as f:
for i in range(1024):
f.seek(i * 1024)
f.read(1)
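The handler above is what each hellopy-read-per-N-bytes function runs: it optionally burns CPU in simulate_work and then touches one byte every 1,024 bytes of a bundled filler.file, so the package size set by FunctionImageSizeMB directly controls how much of the deployment artifact an invocation pages in. A quick way to exercise it outside Lambda is sketched below; the fake context, the locally generated 2 MiB filler file, and the empty event are assumptions made for the test, not part of the commit (only the file name main.py is grounded, via the config's Handler "main.lambda_handler").

```python
# Local smoke test for the 1,024-byte-stride handler (assumption: the handler
# above is saved as main.py next to this script).
import json
from types import SimpleNamespace

from main import lambda_handler

# Generate a small filler file locally; in the deployed package its size is
# governed by FunctionImageSizeMB. 2 MiB is enough to cover 1024 reads at a
# 1,024-byte stride.
with open("filler.file", "wb") as f:
    f.write(b"\0" * (2 * 1024 * 1024))

context = SimpleNamespace(aws_request_id="local-test")   # stand-in for the Lambda context
response = lambda_handler({}, context)                   # empty event -> IncrementLimit stays 0

print(response["statusCode"])
print(json.loads(response["body"])["TimestampChain"])
```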
46 changes: 46 additions & 0 deletions (new Lambda handler, 20,480-byte-stride variant)
@@ -0,0 +1,46 @@
import json
import os
import time
import random


def lambda_handler(request, context):
incr_limit = 0

if 'queryStringParameters' in request and 'IncrementLimit' in request['queryStringParameters']:
incr_limit = int(request['queryStringParameters'].get('IncrementLimit', 0))
elif 'body' in request and json.loads(request['body'])['IncrementLimit']:
incr_limit = int(json.loads(request['body'])['IncrementLimit'])

simulate_work(incr_limit)
read_filler_file('./filler.file')

json_region = os.environ.get('AWS_REGION', 'Unknown')

response = {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": json.dumps({
"Region ": json_region,
"RequestID": context.aws_request_id,
"TimestampChain": [str(time.time_ns())]
}, indent=4)
}

return response


def simulate_work(increment):
# MAXNUM = 6103705
num = 0
while num < increment:
num += 1


def read_filler_file(path: str) -> None:
with open(path, 'rb') as f:
for i in range(1024):
f.seek(i * 20480)
f.read(1)
46 changes: 46 additions & 0 deletions (new Lambda handler, 6,144-byte-stride variant)
@@ -0,0 +1,46 @@
import json
import os
import time
import random


def lambda_handler(request, context):
incr_limit = 0

if 'queryStringParameters' in request and 'IncrementLimit' in request['queryStringParameters']:
incr_limit = int(request['queryStringParameters'].get('IncrementLimit', 0))
elif 'body' in request and json.loads(request['body'])['IncrementLimit']:
incr_limit = int(json.loads(request['body'])['IncrementLimit'])

simulate_work(incr_limit)
read_filler_file('./filler.file')

json_region = os.environ.get('AWS_REGION', 'Unknown')

response = {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": json.dumps({
"Region ": json_region,
"RequestID": context.aws_request_id,
"TimestampChain": [str(time.time_ns())]
}, indent=4)
}

return response


def simulate_work(increment):
# MAXNUM = 6103705
num = 0
while num < increment:
num += 1


def read_filler_file(path: str) -> None:
with open(path, 'rb') as f:
for i in range(1024):
f.seek(i * 6144)
f.read(1)
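The three handlers are identical except for the seek stride (1,024, 6,144, and 20,480 bytes), so every invocation issues exactly 1,024 one-byte reads but spans a different slice of the filler file. The arithmetic below just makes that span explicit; treating the filler file as roughly the 100 MB implied by FunctionImageSizeMB is an inference from the configs, not something stated in the commit.

```python
# Bytes spanned by read_filler_file for each chunk-size variant:
# 1024 reads, one byte each, at the given stride.
READS = 1024

for stride in (1024, 6144, 20480):
    span_mib = (READS - 1) * stride / (1024 * 1024)
    print(f"stride {stride:>6} B -> reads span about {span_mib:.1f} MiB of filler.file")
```

The printed spans (about 1, 6, and 20 MiB) match the 1-, 6-, and 20-kbyte labels used in the workflow matrix and config titles.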
