Tests Using Locust and Faker
jiahuili430 committed Nov 2, 2021
1 parent 057e101 commit fc90a9a
Showing 6 changed files with 342 additions and 0 deletions.
29 changes: 29 additions & 0 deletions pom.xml
@@ -148,6 +148,15 @@
<version>${scala.plugin.version}</version>
<configuration>
<launchers>
<launcher>
<id>clouseau</id>
<mainClass>com.cloudant.clouseau.Main</mainClass>
<jvmArgs>
<jvmArg>[email protected]</jvmArg>
<jvmArg>-Dclouseau.cookie=monster</jvmArg>
<jvmArg>-Dclouseau.dir=${basedir}/target/clouseau</jvmArg>
</jvmArgs>
</launcher>
<launcher>
<id>clouseau1</id>
<mainClass>com.cloudant.clouseau.Main</mainClass>
@@ -346,6 +355,26 @@
</execution>
</executions>
</plugin>
<!-- Code Coverage report generation -->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.7.9</version>
<executions>
<execution>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>generate-code-coverage-report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
<extensions>
<extension>
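
With `prepare-agent` active and the `report` goal bound to the `test` phase as above, a plain test run should be enough to produce the coverage report (assuming JaCoCo's default output settings):

```
mvn test
# the HTML report is written to target/site/jacoco/index.html (JaCoCo's default location)
```
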
51 changes: 51 additions & 0 deletions test/README.md
@@ -0,0 +1,51 @@
# Locust Test

Test `Clouseau` using [Locust](https://github.com/locustio/locust) and [Faker](https://github.com/joke2k/faker).

## Configuration options

The test is driven by the Locust command-line options below; `--docs-number` and `--separator` are custom options defined in `locustfile.py`.

Command line | Description
--- | ---
--headless | Disable the web interface, and start the test
--only-summary | Only print the summary stats
--host | Host to load test
-u | Peak number of concurrent Locust users
-r | Rate to spawn users at (users per second)
-t | Stop after the specified amount of time
--docs-number | Number of documents to generate (custom option, default: 100,000)
--separator | Number of documents stored per generated JSON file (custom option, default: 5,000)

```
locust -f locustfile.py --headless --only-summary --docs-number 10 -u 1 -r 1 -t 10
```
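
The tasks in `locustfile.py` are tagged `get` and `search`, so Locust's standard `--tags` option can presumably be combined with the options above to run only a subset of the tasks, for example just the search checks:

```
locust -f locustfile.py --headless --only-summary --tags search --docs-number 100 --separator 50 -u 1 -r 1 -t 10
```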

## Basic Usage

Run `CouchDB` and three `Clouseau` nodes in separate terminals, then run the Locust test:

```
# Open 4 terminals and run one of the following commands in each:
./dev/run --admin=adm:pass
mvn scala:run -Dlauncher=clouseau1
mvn scala:run -Dlauncher=clouseau2
mvn scala:run -Dlauncher=clouseau3
```

### Install dependencies

```
./run install
```

### Run the Locust tests

```
./run locust
```

### Cleanup

```
./run clean
```
50 changes: 50 additions & 0 deletions test/data.py
@@ -0,0 +1,50 @@
import math
import json
from datetime import date
from random import choice
from faker import Faker


def write_to_files(data, filename, separator=0):
    # A non-zero separator splits the data into chunks of that size,
    # written to data0.json, data1.json, ... instead of a single file.
    if separator:
        chunks = [data[separator * i:separator * (i + 1)]
                  for i in range(math.ceil(len(data) / separator))]
        idx = filename.find('.json')
        for i in range(len(chunks)):
            with open(filename[:idx] + str(i) + filename[idx:], 'w') as outfile:
                json.dump(chunks[i], outfile)
    else:
        with open(filename, 'w') as outfile:
            json.dump(data, outfile)


def gen_data(n=10, separator=10):
    data = []
    counter = {}
    fake = Faker()
    fields = ['married', 'ethnicity', 'gender']
    counter['total_rows'] = n

    for i in range(n):
        data.append({'_id': str(i)})
        data[i]['gender'] = choice(['M', 'F'])
        data[i]['name'] = fake.name_male() if data[i]['gender'] == 'M' else fake.name_female()
        data[i]['date_of_birth'] = fake.iso8601()
        data[i]['age'] = date.today().year - int(data[i]['date_of_birth'][:4])
        data[i]['married'] = 'False' if data[i]['age'] < 22 else choice(['True', 'False'])
        data[i]['ethnicity'] = choice(['White', 'Black', 'Asian', 'Hispanic', 'non-Hispanic'])
        # Faker US addresses typically look like "123 Main St\nSometown, ST 12345";
        # slice the city, two-letter state and zip code off the tail.
        data[i]['address'] = {'full_address': fake.address()}
        data[i]['address']['city'] = data[i]['address']['full_address'][
            data[i]['address']['full_address'].find('\n') + 1: -10]
        data[i]['address']['area'] = data[i]['address']['full_address'][-8:-6]
        data[i]['address']['zip'] = data[i]['address']['full_address'][-5:]
        data[i]['lat'] = float(fake.latitude())
        data[i]['long'] = float(fake.longitude())

        # Tally married/ethnicity/gender values; the Locust tasks check
        # search results against these counts (written to analysis.json).
        for field in fields:
            if field not in counter:
                counter[field] = {}
            counter[field].update({data[i][field]: counter[field].get(data[i][field], 0) + 1})

    write_to_files(data, 'data.json', separator)
    write_to_files(counter, 'analysis.json')
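
A minimal sketch of exercising the generator on its own (hypothetical counts in the comment; assumes Faker is installed; the chunked file names are the ones `insert_docs` in locustfile.py reads back):

```
import json

import data

# Writes data0.json and data1.json (10 docs each) plus analysis.json.
data.gen_data(n=20, separator=10)

with open("analysis.json") as f:
    counts = json.load(f)

# e.g. {"total_rows": 20, "married": {...}, "ethnicity": {...}, "gender": {"M": 11, "F": 9}}
print(counts)
```
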
167 changes: 167 additions & 0 deletions test/locustfile.py
@@ -0,0 +1,167 @@
import math
import json
import time
import requests
from locust import events, HttpUser, constant, task, tag

import data
from logger import logger

URL = "http://adm:pass@localhost:15984"
DB = "http://adm:pass@localhost:15984/demo"
SESSION = requests.session()
timer = [time.perf_counter()]


def create_database():
    # Recreate the demo database from scratch for every run.
    if SESSION.get(DB).status_code == 200:
        SESSION.delete(DB)
    SESSION.put(DB)


def insert_docs(docs_number, separator):
    # Bulk-load the chunked JSON files written by data.gen_data().
    for i in range(math.ceil(docs_number / separator)):
        payload = {"docs": []}
        with open("data" + str(i) + ".json") as json_file:
            payload["docs"].extend(json.load(json_file))
        SESSION.post(DB + "/_bulk_docs", json=payload, headers={"Content-Type": "application/json"})


def create_indexes():
    design_docs = {
        "_id": "_design/search",
        "indexes": {
            "search_index": {
                "index": "function(doc) {if(doc.gender) {index(\"gender\", doc.gender, {\"store\": true} );};"
                         "if(doc.age) {index(\"age\", doc.age, {\"store\": true} );};"
                         "if(doc.married) {index(\"married\", doc.married, {\"store\": true} );};"
                         "if(doc.ethnicity) {index(\"ethnicity\", doc.ethnicity, {\"store\": true} );}}"
            }
        }
    }
    SESSION.put(f"{DB}/_design/search", data=json.dumps(design_docs))


def get_result(condition, response, func_name):
    if condition:
        response.success()
    else:
        response.failure(func_name + " FAILED.")


@events.init_command_line_parser.add_listener
def _(parser):
    parser.add_argument("--docs-number", type=int, env_var="LOCUST_DOCS_NUMBER", default=100_000,
                        help="How many documents do you want to generate")
    parser.add_argument("--separator", type=int, env_var="LOCUST_SEPARATOR", default=5000,
                        help="How many documents are stored in each JSON file")


@events.test_start.add_listener
def _(environment, **kw):
    data.gen_data(environment.parsed_options.docs_number, environment.parsed_options.separator)
    timer.append(time.perf_counter())
    logger.critical(f"1. Generate documents ---- TIME: {timer[-1] - timer[-2]}")


class LoadTest(HttpUser):
    host = URL
    wait_time = constant(1)

    def on_start(self):
        self.client.get("/", name=self.on_start.__name__)
        create_database()
        timer.append(time.perf_counter())
        logger.debug(f"2. Create Database ---- TIME: {timer[-1] - timer[-2]}")
        insert_docs(self.environment.parsed_options.docs_number, self.environment.parsed_options.separator)
        timer.append(time.perf_counter())
        logger.info(f"2. Insert docs ---- TIME: {timer[-1] - timer[-2]}")
        create_indexes()
        timer.append(time.perf_counter())
        logger.info(f"2. Create design docs ---- TIME: {timer[-1] - timer[-2]}")
        logger.critical("3. Start testing ... ")
        with open("analysis.json") as json_file:
            self.data = json.load(json_file)

    @tag("get")
    @task
    def get_all_dbs(self):
        # _all_dbs is a server-level endpoint, not a database-level one.
        with self.client.get("/_all_dbs", catch_response=True, name="Get All DBs") as response:
            get_result(
                len(response.text) and response.elapsed.total_seconds() < 2.0,
                response, self.get_all_dbs.__name__)

    @tag("get")
    @task
    def get_all_docs(self):
        with self.client.get("/demo/_all_docs", catch_response=True, name="Get All Docs") as response:
            get_result(
                len(response.text) and response.elapsed.total_seconds() < 2.0,
                response, self.get_all_docs.__name__)

    @tag("search")
    @task
    def search_all_docs(self):
        with self.client.get("/demo/_design/search/_search/search_index?query=*:*",
                             catch_response=True, name="Search All Docs") as response:
            get_result(
                response.status_code == 200 and response.json()["total_rows"] == self.data["total_rows"],
                response, self.search_all_docs.__name__)

    @tag("search")
    @task
    def search_gender_is_male(self):
        with self.client.get("/demo/_design/search/_search/search_index?query=gender:m",
                             catch_response=True, name="Search Gender is Male") as response:
            get_result(
                response.status_code == 200 and response.json()["total_rows"] == self.data["gender"]["M"],
                response, self.search_gender_is_male.__name__)

    @tag("search")
    @task
    def search_gender_is_male_with_limit_2(self):
        with self.client.get("/demo/_design/search/_search/search_index?query=gender:m&limit=2",
                             catch_response=True, name="Search Gender is Male with Limit 2") as response:
            get_result(
                response.status_code == 200 and len(response.json()["rows"]) == 2,
                response, self.search_gender_is_male_with_limit_2.__name__)

    @tag("search")
    @task
    def search_gender_is_female_and_sort_by_age(self):
        with self.client.get("/demo/_design/search/_search/search_index?query=gender:f&sort=\"age\"",
                             catch_response=True, name="Search Gender is Female AND Sort by age") as response:
            result = response.json()
            if self.data["gender"]["F"] >= 2:
                # order[0] holds the sort value (age), so the first two rows must be in ascending order.
                conditions = result["total_rows"] == self.data["gender"]["F"] and \
                    result["rows"][0]["order"][0] <= result["rows"][1]["order"][0]
            else:
                conditions = result["total_rows"] == self.data["gender"]["F"]
            get_result(conditions, response, self.search_gender_is_female_and_sort_by_age.__name__)

    @tag("search")
    @task
    def search_married_people_age_should_greater_than_21(self):
        with self.client.get(
                "/demo/_design/search/_search/search_index?query=married:true",
                catch_response=True, name="Search married people age > 21") as response:
            result = response.json()
            for i in result["rows"]:
                if i["fields"]["age"] <= 21:
                    response.failure(self.search_married_people_age_should_greater_than_21.__name__)
                    # Stop at the first bad row so the success() below cannot override the failure.
                    return
            response.success()

    @tag("search")
    @task
    def search_ethnicity_White_OR_Asian(self):
        with self.client.get(
                "/demo/_design/search/_search/search_index?query=ethnicity:White OR ethnicity:Asian",
                catch_response=True, name="Search ethnicity White OR Asian") as response:
            result = response.json()
            get_result(
                response.status_code == 200 and
                result["total_rows"] == self.data["ethnicity"]["White"] + self.data["ethnicity"]["Asian"],
                response, self.search_ethnicity_White_OR_Asian.__name__)

    def on_stop(self):
        self.client.get("/", name=self.on_stop.__name__)
        timer.append(time.perf_counter())
        logger.debug(f"4. Delete database, and shut down the locust ---- TIME: {timer[-1] - timer[-2]}")
        SESSION.delete(DB)
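
The search assertions above depend on the shape of the search response: a `total_rows` count (checked against analysis.json) and per-row `fields`/`order` entries. A minimal sketch of querying the same index directly with `requests`, outside Locust (assumes CouchDB and Clouseau are running as described in test/README.md):

```
import requests

DB = "http://adm:pass@localhost:15984/demo"

resp = requests.get(
    f"{DB}/_design/search/_search/search_index",
    params={"query": "married:true", "limit": 2},
)
body = resp.json()

# total_rows is what the tasks compare against the counts in analysis.json.
print(body["total_rows"])

# Each row carries the stored fields (gender, age, married, ethnicity) plus a sort order array.
for row in body["rows"]:
    print(row["fields"]["age"], row["order"])
```
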
16 changes: 16 additions & 0 deletions test/logger.py
@@ -0,0 +1,16 @@
import logging

from rich.logging import RichHandler

logger = logging.getLogger(__name__)

shell_handler = RichHandler()

logger.setLevel(logging.DEBUG)
shell_handler.setLevel(logging.DEBUG)

fmt_shell = '%(message)s'
shell_formatter = logging.Formatter(fmt_shell)
shell_handler.setFormatter(shell_formatter)

logger.addHandler(shell_handler)
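
The module-level `logger` configured here is what locustfile.py imports; a minimal usage sketch (rich must be installed, which `./run install` handles):

```
from logger import logger

logger.debug("fine-grained timing details")   # DEBUG and above are rendered by the RichHandler
logger.critical("milestone reached")
```
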
29 changes: 29 additions & 0 deletions test/run
@@ -0,0 +1,29 @@
#!/bin/bash
case $1 in
    h | help)
        echo "Common options:
    h, help       Show this help message
    i, install    Install dependencies
    l, locust     Run locust tests
                  (optionally pass the number of docs: ./run l {docs_number})
    c, clean      Clean up generated files"
        ;;
    i | install)
        echo "Installing dependencies"
        python3 -m pip install Faker locust rich
        ;;
    l | locust)
        if [ -n "$2" ] && [ "$2" -ge 1 ]; then
            locust -f locustfile.py --headless --only-summary --docs-number "$2" --separator 5000 -u 1 -r 1 -t 300
        else
            locust -f locustfile.py --headless --only-summary --docs-number 100 --separator 50 -u 1 -r 1 -t 10
        fi
        ;;
    c | clean)
        rm -rf *.json __pycache__/
        echo "Cleanup DONE"
        ;;
    *)
        echo "Unknown option: $1 (run ./run help for usage)"
        ;;
esac
