Merge pull request #57 from salehsedghpour/fix/remove-stress-ng
Fix/remove stress ng
alekodu authored Jun 22, 2022
2 parents 789ac76 + 2febf89 commit 9ae01cd
Showing 5 changed files with 28 additions and 85 deletions.
15 changes: 1 addition & 14 deletions generator/input/new_description.json
@@ -49,20 +49,7 @@
       "protocol": "http",
       "execution_mode": "sequential",
       "cpu_complexity": {
-        "execution_time": "1s",
-        "method": "fibonacci",
-        "workers": 2,
-        "cpu_affinity": [
-          0,
-          2
-        ],
-        "cpu_load": "10%"
-      },
-      "memory_complexity": {
-        "execution_time": "1s",
-        "method": "swap",
-        "workers": 2,
-        "bytes_load": "10%"
+        "execution_time": 0.001
       },
       "network_complexity": {
         "forward_requests": "asynchronous",
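For illustration only (not part of this commit): a minimal Python sketch of an endpoint description after this change, where cpu_complexity carries just an execution time in seconds instead of a stress-ng workload specification. Field names follow the diff above; the exact nesting inside new_description.json is abbreviated here.

import json

# cpu_complexity after this commit: a single execution_time value in seconds.
endpoint = {
    "protocol": "http",
    "execution_mode": "sequential",
    "cpu_complexity": {
        "execution_time": 0.001
    },
    "network_complexity": {
        "forward_requests": "asynchronous"
    }
}

print(json.dumps(endpoint, indent=2))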
16 changes: 2 additions & 14 deletions generator/src/pkg/model/input.go
@@ -26,18 +26,7 @@ type CalledService struct {
 }
 
 type CpuComplexity struct {
-    ExecutionTime string `json:"execution_time"`
-    Method string `json:"method"`
-    Workers int `json:"workers"`
-    CpuAffinity []int `json:"cpu_affinity"`
-    CpuLoad string `json:"cpu_load"`
-}
-
-type MemoryComplexity struct {
-    ExecutionTime string `json:"execution_time"`
-    Method string `json:"method"`
-    Workers int `json:"workers"`
-    BytesLoad string `json:"bytes_load"`
+    ExecutionTime float32 `json:"execution_time"`
 }
 
 type NetworkComplexity struct {
@@ -51,7 +40,6 @@ type Endpoint struct {
     Protocol string `json:"protocol"`
     ExecutionMode string `json:"execution_mode"`
     CpuComplexity *CpuComplexity `json:"cpu_complexity,omitempty"`
-    MemoryComplexity *MemoryComplexity `json:"memory_complexity,omitempty"`
     NetworkComplexity NetworkComplexity `json:"network_complexity"`
 }
 
@@ -82,7 +70,7 @@ type Service struct {
 
 type Cluster struct {
     Cluster string `json:"cluster"`
-    Replicas int `json:"replicas,omitempty"`
+    Replicas int `json:"replicas,omitempty"`
     Namespace string `json:"namespace"`
     Node string `json:"node,omitempty"`
     Annotations []Annotation `json:"annotations,omitempty"`
16 changes: 1 addition & 15 deletions generator/src/pkg/service/util.go
@@ -48,10 +48,7 @@ const (
     EpExecModeDefault = "sequential"
     EpNwResponseSizeDefault = 512
 
-    EpExecTimeDefault = "0.1s"
-    EpMethodDefault = "all"
-    EpWorkersDefault = 1
-    EpLoadDefault = "5%"
+    EpExecTimeDefault = 0.001
 
     EpNwForwardRequests = "asynchronous"
 
@@ -324,22 +321,11 @@ func CreateInputEndpoint() model.Endpoint {
     var ep model.Endpoint
     ep.Protocol = defaultProtocol
     var cpuComplexity model.CpuComplexity
-    var memoryComplexity model.MemoryComplexity
 
     ep.CpuComplexity = &cpuComplexity
-    ep.MemoryComplexity = &memoryComplexity
 
     ep.ExecutionMode = EpExecModeDefault
     cpuComplexity.ExecutionTime = EpExecTimeDefault
-    cpuComplexity.Method = EpMethodDefault
-    cpuComplexity.Workers = EpWorkersDefault
-    cpuComplexity.CpuAffinity = []int{}
-    cpuComplexity.CpuLoad = EpLoadDefault
-
-    memoryComplexity.ExecutionTime = EpExecTimeDefault
-    memoryComplexity.Method = EpMethodDefault
-    memoryComplexity.Workers = EpWorkersDefault
-    memoryComplexity.BytesLoad = EpLoadDefault
 
     ep.NetworkComplexity.ForwardRequests = EpNwForwardRequests
     ep.NetworkComplexity.ResponsePayloadSize = EpNwResponseSizeDefault
21 changes: 0 additions & 21 deletions model/Dockerfile
@@ -23,30 +23,9 @@ RUN apt install -y jq \
     wget \
     2to3
 
-RUN apt install -y libbsd-dev \
-    libcap-dev \
-    libipsec-mb-dev \
-    libjudy-dev \
-    libkeyutils-dev \
-    libsctp-dev \
-    libatomic1 \
-    zlib1g-dev \
-    libkmod-dev \
-    libxxhash-dev \
-    git \
-    build-essential
 
 WORKDIR /usr/src/app
 
-RUN git clone https://github.com/alekodu/stress-ng.git &&\
-    cd stress-ng/ &&\
-    git checkout cloudsim &&\
-    make clean &&\
-    make &&\
-    mv stress-ng my-stress-ng &&\
-    cp my-stress-ng /usr/src/app &&\
-    cd .. &&\
-    rm -R stress-ng/
 
 ADD ./requirements.txt /usr/src/app/requirements.txt
 
45 changes: 24 additions & 21 deletions model/restful/utils/task.py
@@ -17,10 +17,11 @@
 from flask import Blueprint, jsonify, request, current_app
 from aiohttp import ClientSession
 import asyncio
-import subprocess
-import sys
 from concurrent.futures import ThreadPoolExecutor, as_completed
 import requests
+import time
+import random
+import string
 
 FORMATTED_REMOTE_URL = "http://{0}:{1}/{2}"
 
@@ -87,10 +88,10 @@ def run_task(service_name, service_endpoint):
         cpu_future = executor.submit(execute_cpu_bounded_task, service_endpoint["cpu_complexity"])
         task_futures.append(cpu_future)
 
-    # Memory task
-    if ("memory_complexity" in service_endpoint) and len(service_endpoint["memory_complexity"]["execution_time"]) > 0:
-        mem_future = executor.submit(execute_memory_bounded_task, service_endpoint["memory_complexity"])
-        task_futures.append(mem_future)
+    # # Memory task
+    # if ("memory_complexity" in service_endpoint) and len(service_endpoint["memory_complexity"]["execution_time"]) > 0:
+    #     mem_future = executor.submit(execute_memory_bounded_task, service_endpoint["memory_complexity"])
+    #     task_futures.append(mem_future)
 
     # Wait until all threads are done with their tasks
     for future in as_completed(task_futures):
@@ -100,28 +101,31 @@
         elif task_type == "cpu":
             response["cpu_task"]["services"].append(source_svc["service"]+"/"+source_svc["endpoint"])
             response["cpu_task"]["statuses"].append(r)
-        elif task_type == "memory":
-            response["memory_task"]["services"].append(source_svc["service"]+"/"+source_svc["endpoint"])
-            response["memory_task"]["statuses"].append(r)
+        # elif task_type == "memory":
+        #     response["memory_task"]["services"].append(source_svc["service"]+"/"+source_svc["endpoint"])
+        #     response["memory_task"]["statuses"].append(r)
 
     executor.shutdown()
 
     return response
 
 
-def execute_cpu_bounded_task(conf):
-    if len(conf["cpu_affinity"]) > 0:
-        res = subprocess.run(['/usr/src/app/my-stress-ng --class cpu --cpu %s --cpu-method %s --taskset %s --cpu-load %s --timeout %s --metrics-brief' % (conf["workers"], conf["method"], ",".join(str(cpu_id) for cpu_id in conf["cpu_affinity"]), conf["cpu_load"], conf["execution_time"])], capture_output=True, shell=True)
-    else:
-        res = subprocess.run(['/usr/src/app/my-stress-ng --class cpu --cpu %s --cpu-method %s --cpu-load %s --timeout %s --metrics-brief' % (conf["workers"], conf["method"], conf["cpu_load"], conf["execution_time"])], capture_output=True, shell=True)
+def cpu_load(duration):
+    start = time.clock_gettime_ns(time.CLOCK_THREAD_CPUTIME_ID)
+    until = start + duration * 1000000000
+    while time.clock_gettime_ns(time.CLOCK_THREAD_CPUTIME_ID) < until:
+        pass
 
-    return res.stderr.decode("utf-8"), "cpu"
+
+def execute_cpu_bounded_task(conf):
+    cpu_load(float(conf["execution_time"]))
+    return "execution_time: "+str(conf["execution_time"]), "cpu"
 
 
 def execute_memory_bounded_task(conf):
-    res = subprocess.run(['/usr/src/app/my-stress-ng --class memory --vm %s --vm-method %s --vm-bytes %s --timeout %s --metrics-brief' % (conf["workers"], conf["method"], conf["bytes_load"], conf["execution_time"])], capture_output=True, shell=True)
+    #res = subprocess.run(['/usr/src/app/my-stress-ng --class memory --vm %s --vm-method %s --vm-bytes %s --timeout %s --metrics-brief' % (conf["workers"], conf["method"], conf["bytes_load"], conf["execution_time"])], capture_output=True, shell=True)
 
-    return res.stderr.decode("utf-8"), "memory"
+    return "Not implemented yet", "memory"
 
 
 def run_network_task(source_svc, service_endpoint, headers, res_payload):
@@ -219,10 +223,9 @@ def sync_execute_io_bounded_task(source_service, target_service, forward_headers
 
 
 def create_payload(payload_size):
-
-    request_payload = subprocess.run(['cat /dev/urandom | tr -dc "[:alnum:]" | head -c${1:-%s}' % payload_size], capture_output=True, shell=True)
-
-    return request_payload.stdout.decode("utf-8")
+    letters = string.ascii_lowercase
+    result_str = ''.join(random.choice(letters) for i in range(int(payload_size)))
+    return result_str
 
 
 def create_response():
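For illustration only (not part of this commit): a standalone sketch of the technique the new task.py code uses. Instead of shelling out to stress-ng, CPU work is emulated by busy-waiting until the calling thread has consumed the requested amount of CPU time (CLOCK_THREAD_CPUTIME_ID counts only the current thread's CPU time), and request payloads are built from random lowercase letters instead of piping /dev/urandom through a subprocess. The demo harness and example values below are assumptions, not code from the repository.

import random
import string
import time
from concurrent.futures import ThreadPoolExecutor


def cpu_load(duration):
    # Busy-wait until this thread has consumed `duration` seconds of CPU time.
    # CLOCK_THREAD_CPUTIME_ID advances only while this thread is running on a CPU.
    start = time.clock_gettime_ns(time.CLOCK_THREAD_CPUTIME_ID)
    until = start + duration * 1000000000
    while time.clock_gettime_ns(time.CLOCK_THREAD_CPUTIME_ID) < until:
        pass


def create_payload(payload_size):
    # Random lowercase payload of the requested size, replacing the old
    # 'cat /dev/urandom | tr -dc "[:alnum:]"' subprocess pipeline.
    letters = string.ascii_lowercase
    return ''.join(random.choice(letters) for _ in range(int(payload_size)))


if __name__ == "__main__":
    # Example values only; in task.py they come from the endpoint's
    # cpu_complexity and network_complexity configuration.
    with ThreadPoolExecutor(max_workers=2) as executor:
        futures = [executor.submit(cpu_load, 0.001) for _ in range(2)]
        for future in futures:
            future.result()
    print(len(create_payload(512)))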
