Commit 137d9414 authored by Federico Sismondi's avatar Federico Sismondi
Browse files

Merge branch 'develop' of http://gitlab.f-interop.eu/f-interop-contributors/ioppytest into develop

parents 73646117 09091b54
Pipeline #3112 failed with stage
in 0 seconds
......@@ -42,9 +42,12 @@ LIST = automated_iut-coap_client-coapthon \
testing_tool-interoperability-lwm2m \
reference_iut-coap_server \
reference_iut-coap_client \
automated_iut-onem2m_adn \
automated_iut-onem2m_server-eclipse_om2m \
automated_iut-lwm2m_server-leshan \
automated_iut-lwm2m_client-leshan \
info:
@echo $(info_message)
......@@ -330,8 +333,13 @@ _docker-build-lwm2m-additional-resources:
_docker-build-onem2m-additional-resources:
@echo "Starting to build onem2m-additional-resources.. "
@echo "TBD"
docker build --quiet -t automated_iut-onem2m_server-eclipse_om2m-v$(version) -f automation/onem2m_cse_eclipse_om2m/Dockerfile .
docker tag automated_iut-onem2m_server-eclipse_om2m-v$(version):latest automated_iut-onem2m_server-eclipse_om2m
docker build --quiet -t automated_iut-onem2m_adn-v$(version) -f automation/onem2m_adn_etsi_implementation/Dockerfile .
docker tag automated_iut-onem2m_adn-v$(version):latest automated_iut-onem2m_adn
_docker-build-comi-additional-resources:
@echo "Starting to build comi-additional-resources.. "
docker build --quiet -t automated_iut-comi_server-acklio-v$(version) -f automation/comi_server_acklio/Dockerfile .
......
......@@ -3,17 +3,15 @@ ioppytest framework:
ioppytest is a framework for running interoperability tests.
This initial version tackles technical interoperability testing (CoAP
and 6LoWPAN interop tests), and effort is being made to implement
interop semantic tests notably for running tests in the WoT and OneM2M
context.
This initial version tackles technical interoperability testing (CoAP,
LwM2M, 6LoWPAN and OneM2M interop tests).
This repo contains all necessary software (and their dependencies) for
running interoperability test sessions between two implementations
under test (IUTs).
This can be run as standalone software and also integrated to f-interop
architecture.
platform (go.f-interop.eu)
Implemented test suites in the ioppytest framework:
......@@ -25,6 +23,8 @@ framework are:
- CoAP Test Suite (user's IUT vs automated-IUT)
- CoAP Test Suite (between two users' IUT)
- 6LoWPAN Test Suite (between two users' IUT) (WIP)
- LwM2M Test Suite (between two users' IUT) (WIP)
- oneM2M Test Suite (between two users' IUT) (WIP)
......@@ -91,56 +91,91 @@ Event Bus API:
All the calls between the components are documented here:
[CORE API doc](http://doc.f-interop.eu/interop/)
and
[interop tests API doc](http://doc.f-interop.eu/interop/)
Running a test suite:
---------------------
The user needs:
- an implementation under test (IUT) of a standard supported/protocol
by ioppytest framework, which either runs in some specific hardware or
locally in user's PC, e.g. a coap client implementation
by ioppytest framework e.g. a coap client implementation
- run
[the agent component](http://doc.f-interop.eu/interop/#agent)
which will route all the packets emitted from the IUT to the backend
and also to the second IUT (and viceversa)
which plays the role of a VPN client, and which will route all the
packets sent from the IUT (on a certain IPv6 network) to the
backend - which are later routed to the second IUT (and vice versa).
- a user interface to help coordinating the tests
(either GUI or CLI component)
For simplifying the access to CLI, agent and other components, ioppytest
inlcudes a Makefile, with it you can use `make cmd`,
includes a Makefile, with it you can use `make <cmd>`,
for more information execute `make help`
# Running a test suite using F-Interop platform
### make commands
go to [go.f-interop.eu](go.f-interop.eu) and follow the instructions
```
➜ ioppytest git:(master) ✗ make help
help: Help dialog.
build-all-coap-images: Build all testing tool in docker images, and other docker image resources too
build-tools: builds all testing tool docker images (only testing tool)
build-automated-iuts: Build all automated-iut docker images
build-all: Build all testing tool in docker images, and other docker image resources too
clean: clean data directory
sniff-bus: Listen and echo all messages in the event bus
run-cli: Run interactive shell
run-6lowpan-testing-tool: Run 6LoWPAN testing tool in docker container
run-coap-testing-tool: Run CoAP testing tool in docker container
run-lwm2m-testing-tool: Run lwm2m testing tool in docker container
run-onem2m-testing-tool: Run oneM2M testing tool in docker container
run-comi-testing-tool: Run CoMI testing tool in docker container
stop-all: stops testing tools and IUTs running as docker containers
validate-test-description-syntax: validate (yaml) test description file syntax
run-tests: runs all unittests
get-logs: echoes logs from the running containers
install-python-dependencies: installs all py2 and py3 pip dependencies
```
# Running a test suite standalone
# Test session setups:
This mode of execution work for any of the following circumstances
The supported setups are:
- user controls one IUT and wants to run tests against one of the
- user drives one IUT and wants to run tests against one of the
automated-IUTs the framework supports
- user controls one IUT and is in direct contact with a second user
controlling a second IUT
- user controls both implementations (IUTs) taking part in the interop
- user drives both implementations (IUTs) taking part in the interop
session
- user1 drives an IUT, user2 drives an IUT, users are either both
in-situ, or remotely located.
# (opt 1) Running a test suite using F-Interop platform
go to [go.f-interop.eu](go.f-interop.eu) and follow the instructions
## Set up up the message broker
Recommended option (more user friendly).
The interop testing tool uses AMQP for sending messages between its
components, and the remote ones (like the agent). When running a
standalone setup the user first needs to have a RMQ broker running.
# (opt 2) Running a test suite standalone
for this, you will use ioppytest_cli as CLI for
interfacing with testing tool (comms go over AMQP event bus).
Recommended option only for testing tool contributors.
## (opt 2.1) Set up up the message broker
The interop testing tool uses the RabbitMQ (RMQ) message broker for
sending messages between its components, and the remote ones (like the agent).
RMQ broker is a component which is **external** to the testing tool
and which establish the session and infrastructure so compomnents can
and which establishes the session and infrastructure so components can
communicate with each other during the test session.
If using [go.f-interop.eu](go.f-interop.eu) then this is automatically
set-up for you.
When running a standalone setup the user first needs to have a RMQ
broker running..
The options for this are:
- install locally RabbitMQ message broker on local machine,
......@@ -149,12 +184,12 @@ create RMQ vhost, user, pass on local machine
(# TODO add instructions)
- Request a remote vhost and credentials (user,pass) to
federico.sismondi@inria.fr (recommended)
federico<dot>sismondi<at>inria<dot>fr (recommended)
don't hesitate to contact me, this is a very simple procedure and it's
free :D
## Export AMQP environment variables
## (opt 2.2) Export AMQP environment variables
after having a created vhost with its user/password,
export in the machine where the testing tool is running the following
......@@ -165,31 +200,31 @@ export AMQP_URL='amqp://someUser:somePassword@server/amqp_vhost'
export AMQP_EXCHANGE='amq.topic'
```
## Download the source code (see `Clonning the project` for more info)
## (opt 2.3) Download the source code (see `Clonning the project` for more info)
```
git clone --recursive https://gitlab.f-interop.eu/f-interop-contributors/ioppytest.git
git clone https://gitlab.f-interop.eu/f-interop-contributors/ioppytest.git
cd ioppytest
```
## Build the testing tools
## (opt 2.4) Build the testing tools
(docker, py2 and py3 needs to be installed in the machine)
```
make build-all
```
## Run testing tool (CoAP testing tool example)
## (opt 2.5) Run testing tool (CoAP testing tool example)
```
make run-coap-testing-tool
```
## Connect to the interop session using the CLI
## (opt 2.6) Connect to the interop session using the CLI
```
make run-cli
```
## Connect the agent to the backend
## (opt 2.7) Connect the agent to the backend
if user's IUT is a CoAP client:
......@@ -203,9 +238,9 @@ if user's IUT is a CoAP server:
make run-agent-coap-server
```
## Running a second IUT
## (opt 2.8) Running a second IUT
### User to user session, second user with his/her own IUT
### (opt 2.8.1) User to user session, second user with his/her own IUT
The second IUT needs to connect to the same VHOST the same way first IUT
did. For this the RMQ broker needs to be reachable by this second IUT
......@@ -214,7 +249,7 @@ did. For this the RMQ broker needs to be reachable by this second IUT
If this is the case then user 2 should just export the same environment
variables as user 1, and launch agent, and CLI just as user 1 did.
### Single user session, against an automated-IUT
### (opt 2.8.2) Single user session, against an automated-IUT
If the user wants to run test against one of the automated-IUT
(~reference implementation) supported by ioppytest:
......@@ -230,7 +265,7 @@ make run-coap-client
```
## Running the interop session
## (opt 2.9) Running the interop session
Simply follow the CLI instructions and enjoy! :)
......@@ -244,37 +279,10 @@ Developping a new test suite:
## Clonning the project
```
git clone --recursive https://gitlab.f-interop.eu/f-interop-contributors/ioppytest.git
git clone https://gitlab.f-interop.eu/f-interop-contributors/ioppytest.git
cd ioppytest
```
### Attention with the git submodules!
remember when cloning a project with submodules to use --recursive flag
```
git clone --recursive ...
```
or else (in case you forgot about the flag), right after cloning you can:
```
git submodule update --init --recursive
```
whenever you find that your utils libraries are not the latests versions
you can 'bring' those last changes from the main utils repo to your project
with:
```
git submodule update --remote --merge
```
after bringing the last changes you can update your project with the last changes by doing:
```
git add <someSubModuleDir>
git commit -m 'updated submodule reference to last commit'
git push
```
## How to merge new features to upstream branch ?
Read CONTRIBUTING.rst document
......@@ -299,7 +307,8 @@ docker run -it
alternatively, if you are curious and you want to know
what's under the hood:
what's under the hood, you can see which processes are being run - in
the docker container - by the testing tool:
```
docker run -it
......@@ -332,7 +341,7 @@ you can use for example envs/coap_testing_tool/supervisor.conf.ini
for using the coap_testing_tool
note: use -E when launching supervisor process, it preserves the
env vars
env vars (like an exported AMQP_URL)
FAQ
......@@ -351,7 +360,7 @@ FAQ
- I have my own CoAP implementation, how can I add it as an
automated-IUT into CoAP Testing Tool:
please contact federico.sismondi@inria.fr
please contact federico<dot>sismondi<at>inria<dot>fr
- Docker build returns a "cannot fetch package" or a "cannot resolve .."
......
FROM ubuntu:16.04

# Install all system packages in a single layer. Keeping `apt-get update`
# chained with the installs prevents Docker's layer cache from pairing a
# stale package index with a later install (the classic "cannot fetch
# package" / "cannot resolve" build failure), and produces fewer layers.
# default-jre is a requirement of Californium.
RUN apt-get update && apt-get -y -qq install \
    apt-utils \
    python3-dev \
    build-essential \
    python3-setuptools \
    python3-pip \
    python-pip \
    supervisor \
    net-tools \
    iputils-ping \
    make \
    default-jre

# upgrade pip (both the py2 and py3 interpreters are used by the tooling)
RUN python -m pip install --upgrade pip
RUN python3 -m pip install --upgrade pip

ADD . /ioppytest
ENV PATH="/ioppytest:$PATH"
WORKDIR /ioppytest

# install aux ioppytest packages
RUN python -m pip install ioppytest-agent
RUN python3 -m pip install ioppytest-utils

# requirements on automated-iut python code
RUN python3 -m pip install -r automation/requirements.txt

# temp PATCH for avoiding https://github.com/celery/py-amqp/issues/191
RUN python3 -m pip install -Iv amqp==2.2.2
RUN python -m pip install -Iv amqp==2.2.2

EXPOSE 5671 5672

# launch processes (supervisord manages the agent + the automated IUT)
CMD ["/usr/bin/supervisord", "--nodaemon", "--configuration", "automation/onem2m_adn_etsi_implementation/supervisor.conf"]
# -*- coding: utf-8 -*-
# !/usr/bin/env python3
import subprocess
from automation import COAP_SERVER_HOST, COAP_SERVER_PORT, COAP_CLIENT_HOST, LOG_LEVEL
# NOTE(review): star-import presumably provides AutomatedIUT, logging and
# STIMULI_HANDLER_TOUT used below — confirm against automation/automated_iut.
from automation.automated_iut import *
# Root logger; verbosity is driven by the framework-wide LOG_LEVEL setting.
logger = logging.getLogger()
logger.setLevel(LOG_LEVEL)
# Default CoAP endpoint values built from framework-level config (IPv6 literal host).
default_coap_server_base_url = 'coap://[%s]:%s' % (COAP_SERVER_HOST, COAP_SERVER_PORT)
coap_host_address = COAP_CLIENT_HOST
class ADN(AutomatedIUT):
    """
    Automated oneM2M ADN (Application Dedicated Node) IUT driver.

    Spawns the ETSI ADN java implementation once per stimuli, e.g.:

        java -jar (..)/adn.jar -h [bbbb::2] -p 5683 -ci server -cn server -o Cae-admin -t TD_M2M_NH_01
    """
    component_id = 'automated_iut-onem2m_adn'
    node = 'adn'
    # Base command already ends with the '-t' option; _execute_stimuli appends
    # only the testcase id value (previously '-t' was passed twice).
    iut_base_cmd = 'java -jar automation/onem2m_adn_etsi_implementation/target/adn/adn.jar -h [bbbb::2] -p 5683 -ci server -cn server -o Cae-admin -t'

    # mapping message's stimuli id -> testcase id
    stimuli_to_testcase_map = {
        'TD_M2M_NH_01_step_01': 'TD_M2M_NH_01',
        'TD_M2M_NH_06_step_01': 'TD_M2M_NH_06',
        'TD_M2M_NH_07_step_01': 'TD_M2M_NH_07',
        'TD_M2M_NH_08_step_01': 'TD_M2M_NH_08',
        'TD_M2M_NH_09_step_01': 'TD_M2M_NH_09',
        'TD_M2M_NH_10_step_01': 'TD_M2M_NH_10',
        'TD_M2M_NH_11_step_01': 'TD_M2M_NH_11',
        'TD_M2M_NH_12_step_01': 'TD_M2M_NH_12',
        'TD_M2M_NH_13_step_01': 'TD_M2M_NH_13',
        'TD_M2M_NH_14_step_01': 'TD_M2M_NH_14',
        'TD_M2M_NH_15_step_01': 'TD_M2M_NH_15',
        'TD_M2M_NH_17_step_01': 'TD_M2M_NH_17',
        'TD_M2M_NH_49_step_01': 'TD_M2M_NH_49',
        'TD_M2M_NH_50_step_01': 'TD_M2M_NH_50',
        'TD_M2M_NH_71_step_01': 'TD_M2M_NH_71',
        'TD_M2M_NH_72_step_01': 'TD_M2M_NH_72',
        'TD_M2M_NH_18_step_01': 'TD_M2M_NH_18',
        'TD_M2M_NH_19_step_01': 'TD_M2M_NH_19',
        'TD_M2M_NH_20_step_01': 'TD_M2M_NH_20',
        'TD_M2M_NH_21_step_01': 'TD_M2M_NH_21',
        'TD_M2M_NH_22_step_01': 'TD_M2M_NH_22',
        'TD_M2M_NH_23_step_01': 'TD_M2M_NH_23',
        'TD_M2M_NH_24_step_01': 'TD_M2M_NH_24',
        'TD_M2M_NH_25_step_01': 'TD_M2M_NH_25',
        'TD_M2M_NH_26_step_01': 'TD_M2M_NH_26',
        'TD_M2M_NH_27_step_01': 'TD_M2M_NH_27',
        'TD_M2M_NH_28_step_01': 'TD_M2M_NH_28',
        'TD_M2M_NH_29_step_01': 'TD_M2M_NH_29',
        'TD_M2M_NH_30_step_01': 'TD_M2M_NH_30',
        'TD_M2M_NH_31_step_01': 'TD_M2M_NH_31',
        'TD_M2M_NH_32_step_01': 'TD_M2M_NH_32',
        'TD_M2M_NH_33_step_01': 'TD_M2M_NH_33',
        'TD_M2M_NH_34_step_01': 'TD_M2M_NH_34',
        # fix: previously mapped to TD_M2M_NH_36 (copy/paste slip) — every other
        # entry maps a step to its own testcase id
        'TD_M2M_NH_35_step_01': 'TD_M2M_NH_35',
        'TD_M2M_NH_36_step_01': 'TD_M2M_NH_36',
        'TD_M2M_NH_37_step_01': 'TD_M2M_NH_37',
        'TD_M2M_NH_38_step_01': 'TD_M2M_NH_38',
    }
    implemented_stimuli_list = list(stimuli_to_testcase_map.keys())
    implemented_testcases_list = list(stimuli_to_testcase_map.values())

    def __init__(self):
        super().__init__(self.node)
        logger.info('starting %s [ %s ]' % (self.node, self.component_id))

    def _execute_verify(self, verify_step_id):
        # The ADN jar offers no introspection hook, so VERIFY steps are
        # acknowledged but left to the user / test coordinator.
        logger.warning('Ignoring: %s. No auto-iut mechanism for verify step implemented.' % verify_step_id)

    def _execute_stimuli(self, stimuli_step_id, addr):
        """
        Run the ADN jar for the testcase mapped to stimuli_step_id and log
        its stdout. Timeouts and errors are logged, never raised.

        NOTE(review): the target address (addr) is currently ignored — the jar
        is always pointed at the address hard-coded in iut_base_cmd.
        """
        logger.info('got stimuli execute request: \n\tSTIMULI_ID=%s,\n\tTARGET_ADDRESS=%s' % (stimuli_step_id, addr))
        try:
            # Generate IUT CMD for stimuli. iut_base_cmd already ends with
            # '-t', so append only the testcase id (the old code appended
            # '-t <id>' again, yielding a duplicated '-t -t' flag).
            cmd = self.iut_base_cmd
            cmd += ' {value}'.format(value=self.stimuli_to_testcase_map[stimuli_step_id])

            # Execute IUT CMD for stimuli
            logger.info('Spawning process with : %s' % cmd)
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
            proc.wait(timeout=STIMULI_HANDLER_TOUT)

            # Drain whatever the process wrote to stdout for the session logs.
            output = ''
            while proc.poll() is None:
                output += str(proc.stdout.readline())
            output += str(proc.stdout.read())
            logger.info('EXECUTED: %s' % stimuli_step_id)
            logger.info('Process STDOUT: %s' % output)
        except subprocess.TimeoutExpired as tout:
            logger.warning('Process TIMEOUT. info: %s' % str(tout))
        except Exception as e:
            # use the module logger (was logging.*) and fix 'tryning' typo
            logger.error('Error found on automated-iut while trying to execute stimuli %s' % stimuli_step_id)
            logger.error(e)

    def _execute_configuration(self, testcase_id, node):
        # no config / reset needed for this implementation; just report our address
        return coap_host_address
if __name__ == '__main__':
    # Entry point: start the automated IUT driver and block until it finishes.
    try:
        adn_iut = ADN()
        adn_iut.start()
        adn_iut.join()
    except Exception as err:
        logger.error(err)
; supervisord configuration for the automated oneM2M ADN docker image:
; runs the ioppytest agent (AMQP/tun routing) plus the automated IUT driver.
[unix_http_server]
file=/tmp/supervisor.sock ; (the path to the socket file)
[supervisord]
logfile=/tmp/supervisord.log ; (main log file;default $CWD/supervisord.log)
logfile_maxbytes=50MB ; (max main logfile bytes b4 rotation;default 50MB)
logfile_backups=10 ; (num of main logfile rotation backups;default 10)
loglevel=info ; (log level;default info; others: debug,warn,trace)
pidfile=/tmp/supervisord.pid ; (supervisord pidfile;default supervisord.pid)
nodaemon=false ; (start in foreground if true;default false)
minfds=1024 ; (min. avail startup file descriptors;default 1024)
minprocs=200 ; (min. avail process descriptors;default 200)
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[supervisorctl]
serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket
; Agent process: connects to the AMQP event bus (URL/exchange from env vars)
; and bootstraps the IPv6 tun interface bbbb::1 for the IUT's traffic.
[program:agent]
command = ioppytest-agent connect
--url %(ENV_AMQP_URL)s
--exchange %(ENV_AMQP_EXCHANGE)s
--name adn
--force-bootstrap
--ipv6-prefix bbbb
--ipv6-host 1
user=root
stopsignal=INT
stopasgroup=true
autorestart=false
loglevel=debug
; for sending logs to docker
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
;redirect_stderr=true
;stdout_logfile = /var/log/agent-adn-stdout.log
;stdout_logfile_maxbytes = 10MB
;stdout_logfile_backups = 5
; One-shot init: waits for the agent to come up (sleep 6), then makes the
; ADN start script executable and runs it.
[program:init]
command = sh -c "sleep 6;chmod 777 automation/onem2m_adn_etsi_implementation/target/init/start.sh;./start.sh"
autorestart=false
stopsignal=INT
stopasgroup=true
loglevel=debug
; for sending logs to docker
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
;redirect_stderr=true
;stdout_logfile = /var/log/automated_iut-init-stdout.log
; Automated IUT driver: the python module that listens for stimuli on the
; event bus and spawns the ADN jar (also delayed until the agent is up).
[program:automated-iut]
command = sh -c "sleep 6;/usr/bin/python3 -m automation.onem2m_adn_etsi_implementation.automated_iut"
autorestart=false
stopsignal=INT
stopasgroup=true
loglevel=debug
; for sending logs to docker
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
;redirect_stderr=true
;stdout_logfile = /var/log/automated_iut-adn-stdout.log
#Californium CoAP Properties file
#Sun Jan 28 23:20:48 CET 2018
DEFAULT_ENDPOINT_THREAD_COUNT=1
MAX_TRANSMIT_WAIT=93000
MAX_RETRANSMIT=4
UDP_CONNECTOR_SENDER_THREAD_COUNT=1
DEFAULT_LEISURE=5000
NOTIFICATION_MAX_AGE=128000
CROP_ROTATION_PERIOD=2000
USE_RANDOM_MID_START=true
ACK_TIMEOUT_SCALE=2
UDP_CONNECTOR_DATAGRAM_SIZE=2000
DEDUPLICATOR=DEDUPLICATOR_MARK_AND_SWEEP
HTTP_PORT=8080
NSTART=1
NOTIFICATION_CHECK_INTERVAL=86400000
MAX_MESSAGE_SIZE=1024
DEFAULT_COAP_PORT=5683
DEFAULT_BLOCK_SIZE=512
UDP_CONNECTOR_RECEIVE_BUFFER=0
ACK_TIMEOUT=2000
HTTP_CACHE_SIZE=32
UDP_CONNECTOR_OUT_CAPACITY=2147483647
HTTP_SERVER_SOCKET_BUFFER_SIZE=8192
ACK_RANDOM_FACTOR=1.5
MARK_AND_SWEEP_INTERVAL=10000
NOTIFICATION_CHECK_INTERVAL_COUNT=100
HTTP_CACHE_RESPONSE_MAX_AGE=86400
UDP_CONNECTOR_LOG_PACKETS=false
PROBING_RATE=1.0
NOTIFICATION_REREGISTRATION_BACKOFF=2000
EXCHANGE_LIFECYCLE=247000
SERVER_THRESD_NUMER=4
UDP_CONNECTOR_RECEIVER_THREAD_COUNT=1
UDP_CONNECTOR_SEND_BUFFER=0
HTTP_SERVER_SOCKET_TIMEOUT=100000
USE_RANDOM_TOKEN_START=true
#Californium CoAP Properties file
#Sun Jan 28 23:20:48 CET 2018
DEFAULT_ENDPOINT_THREAD_COUNT=1
MAX_TRANSMIT_WAIT=93000
MAX_RETRANSMIT=4
UDP_CONNECTOR_SENDER_THREAD_COUNT=1
DEFAULT_LEISURE=5000
NOTIFICATION_MAX_AGE=128000
CROP_ROTATION_PERIOD=2000
USE_RANDOM_MID_START=true
ACK_TIMEOUT_SCALE=2
UDP_CONNECTOR_DATAGRAM_SIZE=2000
DEDUPLICATOR=DEDUPLICATOR_MARK_AND_SWEEP
HTTP_PORT=8080
NSTART=1
NOTIFICATION_CHECK_INTERVAL=86400000
MAX_MESSAGE_SIZE=1024
DEFAULT_COAP_PORT=5683
DEFAULT_BLOCK_SIZE=512
UDP_CONNECTOR_RECEIVE_BUFFER=0
ACK_TIMEOUT=2000
HTTP_CACHE_SIZE=32