Skip to content

Commit

Permalink
Experiment 15 - Chunk size
Browse files Browse the repository at this point in the history
  • Loading branch information
ypwong99 committed Feb 24, 2024
1 parent 6be1cde commit 5605925
Show file tree
Hide file tree
Showing 5 changed files with 193 additions and 51 deletions.
102 changes: 51 additions & 51 deletions .github/workflows/chunk-size-experiment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,56 +39,56 @@ jobs:
path: ${{ env.working-directory }}/build.tar
retention-days: 1

aws-chunk:
needs: build_client
runs-on: [ self-hosted, aws ]
strategy:
fail-fast: false
matrix:
name: [ aws-chunk-2-kbytes, aws-chunk-12-kbytes, aws-chunk-24-kbytes ]
env:
working-directory: src
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}

steps:
- name: Check out code into the Go module directory
uses: actions/checkout@v4
with:
ref: ypwong99/temp-experiment-image-sizes

- name: Configure AWS credentials using EASE lab account
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
aws-region: us-west-1

- name: Set up Node.js 16.16.0
uses: actions/setup-node@v3
with:
node-version: 16.16.0

- name: Download client artifact
uses: actions/download-artifact@v3
with:
name: STeLLAR-build

- name: Untar client build
working-directory: ${{ env.working-directory }}
run: tar --strip-components=1 -xvf ../build.tar -C .

- name: Run experiment ${{ matrix.name }}
working-directory: ${{ env.working-directory }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
run: ./stellar -a 356764711652 -o latency-samples -c ../experiments/chunk/${{ matrix.name }}.json

- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.name }}
path: ${{ env.working-directory }}/latency-samples
# aws-chunk:
# needs: build_client
# runs-on: [ self-hosted, aws ]
# strategy:
# fail-fast: false
# matrix:
# name: [ aws-chunk-2-kbytes, aws-chunk-12-kbytes, aws-chunk-24-kbytes ]
# env:
# working-directory: src
# AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
# AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
#
# steps:
# - name: Check out code into the Go module directory
# uses: actions/checkout@v4
# with:
# ref: ypwong99/temp-experiment-image-sizes
#
# - name: Configure AWS credentials using EASE lab account
# uses: aws-actions/configure-aws-credentials@v4
# with:
# aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
# aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
# aws-region: us-west-1
#
# - name: Set up Node.js 16.16.0
# uses: actions/setup-node@v3
# with:
# node-version: 16.16.0
#
# - name: Download client artifact
# uses: actions/download-artifact@v3
# with:
# name: STeLLAR-build
#
# - name: Untar client build
# working-directory: ${{ env.working-directory }}
# run: tar --strip-components=1 -xvf ../build.tar -C .
#
# - name: Run experiment ${{ matrix.name }}
# working-directory: ${{ env.working-directory }}
# env:
# AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
# AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
# run: ./stellar -a 356764711652 -o latency-samples -c ../experiments/chunk/${{ matrix.name }}.json
#
# - uses: actions/upload-artifact@v3
# with:
# name: ${{ matrix.name }}
# path: ${{ env.working-directory }}/latency-samples

azure-chunk:
needs: build_client
Expand All @@ -97,7 +97,7 @@ jobs:
strategy:
fail-fast: false
matrix:
name: [ azure-chunk-2-kbytes, azure-chunk-12-kbytes, azure-chunk-24-kbytes ]
name: [ azure-chunk-1-kbytes, azure-chunk-6-kbytes ]
env:
working-directory: src
AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
Expand Down
23 changes: 23 additions & 0 deletions experiments/chunk/azure-chunk-1-kbytes.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
{
"Provider": "azure",
"Runtime": "python3.8",
"SubExperiments": [
{
"Title": "azure-chunk-1-kbytes",
"Function": "hellopy-read-per-1024-bytes",
"Handler": "main.main",
"PackageType": "Zip",
"PackagePattern": "main.py",
"Bursts": 500,
"BurstSizes": [
1
],
"IATSeconds": 600,
"DesiredServiceTimes": [
"0ms"
],
"Parallelism": 50,
"FunctionImageSizeMB": 100
}
]
}
23 changes: 23 additions & 0 deletions experiments/chunk/azure-chunk-6-kbytes.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
{
"Provider": "azure",
"Runtime": "python3.8",
"SubExperiments": [
{
"Title": "azure-chunk-6-kbytes",
"Function": "hellopy-read-per-6144-bytes",
"Handler": "main.main",
"PackageType": "Zip",
"PackagePattern": "main.py",
"Bursts": 500,
"BurstSizes": [
1
],
"IATSeconds": 600,
"DesiredServiceTimes": [
"0ms"
],
"Parallelism": 50,
"FunctionImageSizeMB": 100
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import json
import os
import time
import random

import azure.functions as func


def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    """Azure Functions HTTP entry point for the chunk-read latency experiment.

    Reads an optional integer ``IncrementLimit`` from the query string or,
    failing that, from the JSON request body (defaulting to 0), busy-loops
    that many iterations, then touches the bundled filler file once per
    chunk. Responds with the invocation id and a nanosecond timestamp.

    BUG FIX: the original code's trailing ``else: incr_limit = 0`` discarded
    a successfully-parsed query-string value, so ``IncrementLimit`` passed as
    a URL parameter was always ignored. The parameter is now honored.
    """
    raw_param = req.params.get('IncrementLimit')
    if raw_param:
        # Query-string parameter takes precedence.
        incr_limit = int(raw_param)
    else:
        # Fall back to the JSON body; any parse failure means no work.
        try:
            req_body = req.get_json()
        except ValueError:
            incr_limit = 0
        else:
            raw_body_value = req_body.get('IncrementLimit')
            incr_limit = int(raw_body_value) if raw_body_value else 0

    simulate_work(incr_limit)
    # filler.file lives one directory above the function's own folder.
    read_filler_file(f"{context.function_directory}/../filler.file")

    return func.HttpResponse(
        body=json.dumps({
            "RequestID": context.invocation_id,
            "TimestampChain": [str(time.time_ns())]
        }, indent=4),
        status_code=200,
        headers={
            "Content-Type": "application/json"
        }
    )


def simulate_work(increment):
    """Simulate CPU-bound work by counting up to ``increment``.

    A deliberate busy loop (not a sleep) so the function consumes real
    compute time; non-positive values perform no work. Returns None.
    """
    # MAXNUM = 6103705
    counter = 0
    while counter < increment:
        counter += 1


def read_filler_file(path: str) -> None:
    """Touch one byte at the start of each 1024-byte chunk of ``path``.

    Performs 1024 seek+read(1) operations at offsets 0, 1024, 2048, ...
    to exercise chunked reads of the filler file. Returns None.
    """
    with open(path, 'rb') as filler:
        for offset in range(0, 1024 * 1024, 1024):
            filler.seek(offset)
            filler.read(1)
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import json
import os
import time
import random

import azure.functions as func


def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    """Azure Functions HTTP entry point for the chunk-read latency experiment.

    Reads an optional integer ``IncrementLimit`` from the query string or,
    failing that, from the JSON request body (defaulting to 0), busy-loops
    that many iterations, then touches the bundled filler file once per
    chunk. Responds with the invocation id and a nanosecond timestamp.

    BUG FIX: the original code's trailing ``else: incr_limit = 0`` discarded
    a successfully-parsed query-string value, so ``IncrementLimit`` passed as
    a URL parameter was always ignored. The parameter is now honored.
    """
    raw_param = req.params.get('IncrementLimit')
    if raw_param:
        # Query-string parameter takes precedence.
        incr_limit = int(raw_param)
    else:
        # Fall back to the JSON body; any parse failure means no work.
        try:
            req_body = req.get_json()
        except ValueError:
            incr_limit = 0
        else:
            raw_body_value = req_body.get('IncrementLimit')
            incr_limit = int(raw_body_value) if raw_body_value else 0

    simulate_work(incr_limit)
    # filler.file lives one directory above the function's own folder.
    read_filler_file(f"{context.function_directory}/../filler.file")

    return func.HttpResponse(
        body=json.dumps({
            "RequestID": context.invocation_id,
            "TimestampChain": [str(time.time_ns())]
        }, indent=4),
        status_code=200,
        headers={
            "Content-Type": "application/json"
        }
    )


def simulate_work(increment):
    """Simulate CPU-bound work by counting up to ``increment``.

    A deliberate busy loop (not a sleep) so the function consumes real
    compute time; non-positive values perform no work. Returns None.
    """
    # MAXNUM = 6103705
    counter = 0
    while counter < increment:
        counter += 1


def read_filler_file(path: str) -> None:
    """Touch one byte at the start of each 6144-byte chunk of ``path``.

    Performs 1024 seek+read(1) operations at offsets 0, 6144, 12288, ...
    to exercise chunked reads of the filler file. Returns None.
    """
    with open(path, 'rb') as filler:
        for offset in range(0, 1024 * 6144, 6144):
            filler.seek(offset)
            filler.read(1)

0 comments on commit 5605925

Please sign in to comment.