Commit 42cf1d13 authored by Wen Wei Li's avatar Wen Wei Li

init

parents
Pipeline #4860 canceled with stages
---
kind: pipeline
type: docker
name: deploy
steps:
# Unit test
- name: unit-test
image: plugins/docker
settings:
dockerfile: Dockerfile-Unit-Test
# build images
- name: build-and-publish-image
image: plugins/docker
settings:
registry: 192.168.17.110
repo: 192.168.17.110/study_group/ric-app-ad
dockerfile: Dockerfile
tags: 1.0.0
username:
from_secret: harbor_username
password:
from_secret: harbor_password
insecure: true
storage_driver: vfs
depends_on: ["unit-test"]
# scan
- name: sast_scan
image: prlab/sast_scan
volumes:
- name: cache
path: /home/reports/
commands:
- python3 /usr/local/src/scan --type python,yaml,dockerfile,kubernetes --src ./ -o /home/reports
depends_on: ["build-and-publish-image"]
# sonarqube scan
- name: code-analysis
image: prlab/drone-sonar
settings:
SONAR_HOST:
from_secret: sonar_host
SONAR_TOKEN:
from_secret: sonar_token
depends_on: ["build-and-publish-image"]
# dependency check for python
- name: dependency-check-py
image: prlab/dependency-check-py
volumes:
- name: cache
path: /home/reports
depends_on: ["build-and-publish-image"]
- name: scp
image: appleboy/drone-scp
volumes:
- name: cache
path: /home/reports/
settings:
host:
from_secret: ssh_host
username:
from_secret: ssh_username
password:
from_secret: ssh_password
port: 22
target: /home/oran/Downloads/drone_reports
source: /home/reports/
depends_on: ["sast_scan", "code-analysis", "dependency-check-py"]
# deploy
- name: ssh_to_deploy
image: ghcr.io/appleboy/drone-ssh
settings:
host:
- 192.168.0.22
username:
from_secret: ssh_username
password:
from_secret: ssh_password
port: 22
command_timeout: 30s
script:
- cd /root/drone/
- if [ ! -d ${DRONE_REPO_NAME} ];then git clone ${DRONE_REPO_LINK}; else cd ${DRONE_REPO_NAME} && git pull; fi
- cd /root/xAppSec
- ./xAppSec -c /root/drone/${DRONE_REPO_NAME}/xapp-descriptor/config.json /root/drone/${DRONE_REPO_NAME}/xapp-descriptor/schema.json &> /root/drone/log
depends_on: ["scp"]
volumes:
- name: cache
temp: {}
[gerrit]
host=gerrit.o-ran-sc.org
port=29418
project=ric-app/ad
defaultbranch=master
---
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
formats:
- htmlzip
build:
image: latest
python:
version: 3.7
install:
- requirements: docs/requirements-docs.txt
sphinx:
configuration: docs/conf.py
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
FROM frolvlad/alpine-miniconda3:python3.7
# RMR setup
RUN mkdir -p /opt/route/
# copy rmr files from builder image in lieu of an Alpine package
COPY --from=nexus3.o-ran-sc.org:10002/o-ran-sc/bldr-alpine3-rmr:4.0.5 /usr/local/lib64/librmr* /usr/local/lib64/
COPY --from=nexus3.o-ran-sc.org:10002/o-ran-sc/bldr-alpine3-rmr:4.0.5 /usr/local/bin/rmr* /usr/local/bin/
ENV LD_LIBRARY_PATH /usr/local/lib/:/usr/local/lib64
COPY local.rt /opt/route/local.rt
ENV RMR_SEED_RT /opt/route/local.rt
RUN apk update && apk add gcc musl-dev
# Install
COPY setup.py /tmp
COPY LICENSE.txt /tmp/
# RUN mkdir -p /tmp/ad/
RUN pip install /tmp
RUN pip install ricxappframe
RUN pip install --force-reinstall redis==3.0.1
ENV PYTHONUNBUFFERED 1
COPY src/ /src
CMD PYTHONPATH=/src:/usr/lib/python3.7/site-packages/:$PYTHONPATH run-src.py
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
FROM frolvlad/alpine-miniconda3:python3.7
# FROM python:3.8-alpine
# RMR setup
RUN mkdir -p /opt/route/
# sdl uses hiredis which needs gcc
RUN apk update && apk add gcc musl-dev
# copy rmr libraries from builder image in lieu of an Alpine package
COPY --from=nexus3.o-ran-sc.org:10002/o-ran-sc/bldr-alpine3-rmr:4.0.5 /usr/local/lib64/librmr* /usr/local/lib64/
# Upgrade pip, install tox
RUN pip install --upgrade pip && pip install tox
RUN apk update && apk add gcc musl-dev
# copies
COPY setup.py tox.ini LICENSE.txt /tmp/
RUN pip install /tmp
COPY src/ /tmp/src
COPY tests/ /tmp/tests
# Run the unit tests
WORKDIR /tmp
RUN PYTHONPATH=/tmp/src:/usr/lib/python3.7/site-packages/:$PYTHONPATH tox -e code,flake8
---
project: 'ric_app_ad'
project_creation_date: '2020-08-17'
project_category: ''
lifecycle_state: 'Incubation'
project_lead: &oran_ric_app_ad_ptl
name: 'Sunil Singh'
email: '[email protected]'
company: 'HCL'
id: 'singh.sunil'
timezone: 'Unknown/Unknown'
primary_contact: *oran_ric_app_ad_ptl
issue_tracking:
type: 'jira'
url: 'https://jira.o-ran-sc.org/projects/'
key: 'ric_app_ad'
mailing_list:
type: 'groups.io'
url: 'https://lists.o-ran-sc.org/g/main'
tag: '[]'
realtime_discussion:
type: 'irc'
server: 'freenode.net'
channel: '#oran'
meetings:
- type: 'gotomeeting+irc'
agenda: 'https://wiki.o-ran-sc.org/display/'
url: ''
server: 'freenode.net'
channel: '#oran'
repeats: ''
time: ''
repositories:
- ric-app/ad
committers:
- <<: *oran_ric_app_ad_ptl
- name: 'Deepanshu Karnwal'
email: '[email protected]'
company: 'HCL'
id: 'deepanshuk'
timezone: 'Unknown/Unknown'
- name: 'Amit Srivastava'
email: '[email protected]'
company: 'HCL'
id: 'amit.sarnath'
timezone: 'Unknown/Unknown'
- name: 'Sandeep Kumar'
email: '[email protected]'
company: 'HCL'
id: 'sandeepindia'
timezone: 'Unknown/Unknown'
- name: 'Matti Hiltunen'
email: '[email protected]'
company: 'AT&T Labs-Research'
id: 'MattiHiltunen'
timezone: 'America/New_York'
tsc:
# yamllint disable rule:line-length
approval: 'https://lists.o-ran-sc.org/g/toc/message/303'
changes:
- type: ''
name: ''
link: ''
Unless otherwise specified, all software contained herein is licensed
under the Apache License, Version 2.0 (the "Software License");
you may not use this software except in compliance with the Software
License. You may obtain a copy of the Software License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the Software License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the Software License for the specific language governing permissions
and limitations under the Software License.
Unless otherwise specified, all documentation contained herein is licensed
under the Creative Commons License, Attribution 4.0 Intl. (the
"Documentation License"); you may not use this documentation except in
compliance with the Documentation License. You may obtain a copy of the
Documentation License at
https://creativecommons.org/licenses/by/4.0/
Unless required by applicable law or agreed to in writing, documentation
distributed under the Documentation License is distributed on an "AS IS"
BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the Documentation License for the specific language governing
permissions and limitations under the Documentation License.
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
Usage of all the programs and files have been mentioned below for the reference.
For AD xapp we require UEReport (UE related dataset)
AD xApp expects UE data from influxDB database in following structure:
* There exists database with name "RIC-Test"
* Inside "RIC-Test" database we have measurements namely "UEReports"
Note: *We need to update ad_config.ini with influxdb configuration.
Update host as one of the following:
1. influxdb service running in RIC platform (host = <service name>.<namespace>)
OR IP of influxdb pod
2. Update user and password for influxDB instance
To populate influxdb with static data provided in .csv (ue.csv).
1. Run "python3 insert.py"
2. Wait for few minutes before deploying AD xApp
Note: This will be deprecated in next release when there will be data coming from KPIMON
AD xApp performs following:
* Initiates xapp api, make connection with influxDB and runs the entry() using xapp.run()
* If Model is not present in the current path,
a) Read history data from InfluxDB
b) apply pre-processing steps
c) trigger Training of ML model.
d) after model validation, save transformation, model artifacts
* Detect anomalous user in real-time.
a) Read live data from influxDB every 0.5 second
b) Detect anomalous records on given input
c) Investigate degradation type for anomalous users
* Listens to RMR port for A1 policy (message type 20011) in the format given below, which contains the throughput threshold parameter (default: 70%) for a degradation event to qualify for a handover
{'operation': 'CREATE', 'payload': '{\"thp_threshold\":74}', 'policy_instance_id': 'demo-1', 'policy_type_id': '9997'}"}
* Send the ue-id, DU-ID, Degradation type and timestamp for the qualified anomalous records to the Traffic Steering (via rmr with the message type as 30003)
* Get the acknowledgement message from the traffic steering
* store xApp result in "AD" measurement of influxDB
Note: Need to implement the logic if we do not get the acknowledgment from the TS. (How xapp api handle this?)
# The Jenkins job uses this string for the tag in the image name
# for example nexus3.o-ran-sc.org:10004/my-image-name:my-tag
---
tag: 1.0.0
# docs/conf.py — Sphinx configuration.
# Pulls the shared O-RAN-SC documentation settings from the lfdocs-conf package
# (listed in docs/requirements-docs.txt).
from docs_conf.conf import *

# Skip link-checking for local/development URLs and the Gerrit server
# (presumably requires authentication — TODO confirm).
linkcheck_ignore = ["http://localhost.*", "http://127.0.0.1.*", "https://gerrit.o-ran-sc.org.*"]
---
project_cfg: oran
project: ric-app-ad
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
Developers Guide
=================
.. contents::
:depth: 3
:local:
Version bumping the Xapp
------------------------
This project follows semver. When changes are made, update the version strings in:
#. ``container-tag.yaml``
#. ``docs/release-notes.rst``
#. ``setup.py``
#. ``xapp-descriptor/config.json``
Testing RMR Healthcheck
-----------------------
The following instructions should deploy the AD container in bare docker, and allow you
to test that the RMR healthcheck is working.
::
docker build -t ad:latest -f Dockerfile .
docker run -d --net=host -e USE_FAKE_SDL=1 ad:latest
docker exec -it CONTAINER_ID /usr/local/bin/rmr_probe -h 127.0.0.1:4560
Unit Testing
------------
Running the unit tests requires the python packages ``tox`` and ``pytest``.
The RMR library is also required during unit tests. If running directly from tox
(outside a Docker container), install RMR according to its instructions.
Upon completion, view the test coverage like this:
::
tox
open htmlcov/index.html
Alternatively, if you cannot install RMR locally, you can run the unit
tests in Docker. This is somewhat less nice because you don't get the
pretty HTML report on coverage.
::
docker build --no-cache -f Dockerfile-Unit-Test .
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
AD xApp
==================
.. toctree::
:maxdepth: 2
:caption: Contents:
overview.rst
developers-guide.rst
release-notes.rst
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
Anomaly Detection Overview
==========================
Anomaly Detection (AD) is an Xapp in the Traffic Steering O-RAN use case,
which performs the following tasks:
#. Data will be inserted into influxDB when xApp starts. This will be removed in Future when data will be coming via KPIMON to influxDB.
#. AD, which iterates every 10 milliseconds, fetches UE information from the database and sends predictions to Traffic Steering
#. Traffic Steering sends acknowledgement back to AD.
Expected Input
--------------
The AD Xapp expects input in following structure:
{
'du-id' : 1003,
'nrCellIdentity' : "c3/B13",
'prb_usage' : 23.0,
'rsrp' : 84.0,
'rsrq' : 65.0,
'rssinr':65.0,
'targetTput' : 0.1,
'throughput' : ,
'ue-id' : "Waiting passenger 1",
'x' : -556,
'y' : -1160,
'measTimeStampRf' : "2021-05-12T07:43:51.652"
}
Expected Output
---------------
The AD Xapp should detect anomalous UE's and send those UE's information
as a JSON message via RMR with the following structure:
{
'ue-id' : "Waiting passenger 1",
'measTimeStampRf' : "2021-05-12T07:43:51.652",
'du-id' : 1003,
'Degradation': "RSRP RSSINR"
}
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
Release Notes
===============
All notable changes to this project will be documented in this file.
The format is based on `Keep a Changelog <http://keepachangelog.com/>`__
and this project adheres to `Semantic Versioning <http://semver.org/>`__.
[1.0.1] - 2023-06-28
--------------------
* Release version 1.0.1 (`RICAPP-215 <https://jira.o-ran-sc.org/browse/RICAPP-215>`_)
[1.0.0] - 2022-12-09
--------------------
* Release version 1.0.0 (`RICAPP-204 <https://jira.o-ran-sc.org/browse/RICAPP-204>`_)
[0.0.2] - 2021-07-5
--------------------
* Release version 0.0.2 (`RICAPP-818 <https://jira.o-ran-sc.org/browse/RIC-818>`_)
[0.0.1] - 2020-08-11
--------------------
* Initial mock version (`RICAPP-142 <https://jira.o-ran-sc.org/browse/RICAPP-142>`_)
sphinx
sphinx-rtd-theme
sphinxcontrib-httpdomain
recommonmark
lfdocs-conf
newrt|start
rte|30003|service-ricxapp-trafficxapp-rmr:4560
rte|20011|service-ricplt-a1mediator-rmr.ricplt:4560
newrt|end
---
distribution_type: container
container_release_tag: 1.0.0
container_pull_registry: nexus3.o-ran-sc.org:10004
container_push_registry: nexus3.o-ran-sc.org:10002
project: ric-app/ad
ref: 77f7c38a2133e3ca11582a217762802d1a14c8fa
containers:
- name: ric-app-ad
version: 1.0.0
# CI script installs RMR from PackageCloud using this version
---
version: 4.0.5
# ==================================================================================
# Copyright (c) 2020 HCL Intellectual Property.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
# setup.py for the AD (Anomaly Detection) xApp — packaging metadata and runtime deps.
from setuptools import setup, find_packages

setup(
    name="ad",
    # NOTE(review): keep in sync with container-tag.yaml, docs/release-notes.rst
    # and xapp-descriptor/config.json (see "Version bumping" in the developer guide).
    version="1.0.1",
    packages=find_packages(exclude=["tests.*", "tests"]),
    description="Anomaly Detection xApp that integrates with Traffic Steering",
    url="https://gerrit.o-ran-sc.org/r/admin/repos/ric-app/ad",
    # ricxappframe capped below 2.0.0 and mdclogpy at 1.1.1 — presumably to
    # avoid breaking API changes; confirm before relaxing the pins.
    install_requires=["ricxappframe>=1.1.1,<2.0.0", "pandas>=1.1.3", "joblib>=0.3.2", "Scikit-learn>=0.18", "mdclogpy<=1.1.1", "schedule>=0.0.0", "influxdb"],
    entry_points={"console_scripts": ["run-src.py=src.main:start"]},  # adds a magical entrypoint for Docker
    license="Apache 2.0",
    data_files=[("", ["LICENSE.txt"])],
)
# in __init__.py
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
[influxdb]
host = ricplt-influxdb.ricplt
port = 8086
user = admin
password =
path =
database = RIC-Test
measurement = UEReports
ssl = False
[features]
thpt = DRB.UEThpDl
rsrp = RF.serving.RSRP
rsrq = RF.serving.RSRQ
rssinr = RF.serving.RSSINR
prb_usage = RRU.PrbUsedDl
ue = ue-id
anomaly = Viavi.UE.anomalies
a1_param = thp_threshold
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
import joblib
from mdclogpy import Logger
logger = Logger(name=__name__)
class modelling(object):
    r""" Filter a dataframe down to the parameters that were used to train the
    model, apply the saved scaler transform to the data, and predict the label
    (normal/anomalous) with the saved model.

    Parameters
    ----------
    data: DataFrame, optional
        input samples to classify
    """

    def __init__(self, data=None):
        self.data = data
        self.load_model()
        self.load_param()
        self.load_scale()

    def load_model(self):
        """Load the trained model artifact from 'src/model' into self.model."""
        try:
            with open('src/model', 'rb') as f:
                self.model = joblib.load(f)
        except FileNotFoundError:
            # NOTE(review): self.model stays unset here, so a later predict()
            # raises AttributeError; callers are expected to train first.
            logger.error("Model does not exist")

    def load_param(self):
        """Load the list of training feature names from 'src/num_params' into self.num."""
        try:
            with open('src/num_params', 'rb') as f:
                self.num = joblib.load(f)
        except FileNotFoundError:
            logger.error("Parameter file does not exist")

    def load_scale(self):
        """Load the fitted scaler from 'src/scale' into self.scale."""
        try:
            with open('src/scale', 'rb') as f:
                self.scale = joblib.load(f)
        except FileNotFoundError:
            logger.error("Scale file does not exist")

    def transformation(self):
        """Apply the saved scaler transform to self.data in place."""
        self.data = self.scale.transform(self.data)

    def predict(self, df):
        """ Load the saved model and return the predicted result.

        Parameters
        ----------
        df: DataFrame
            input samples; must contain the columns listed in self.num

        Returns
        -------
        pred: list of int
            predicted label per sample: 1 = anomalous, 0 = normal
        """
        # Restrict to the features the model was trained on, then scale.
        self.data = df.loc[:, self.num]
        self.transformation()
        pred = self.model.predict(self.data)
        # IsolationForest-style models label anomalies as -1; map to 1/0.
        pred = [1 if p == -1 else 0 for p in pred]
        return pred
class CAUSE(object):
    r"""Rule-based method to find the degradation type of an anomalous sample.

    Attributes
    ----------
    normal: DataFrame
        dataframe that contains only normal samples
    """

    def __init__(self):
        self.normal = None

    def cause(self, df, db, threshold):
        """ Filter normal data for a particular ue-id to compare with a given sample.
        Compare with normal data to find and return the degradation type.

        Parameters
        ----------
        df: DataFrame
            samples with an 'Anomaly' column (1 = flagged anomalous)
        db: DATABASE
            database handle; provides meas/ue/thpt/rsrp/rsrq attribute names
        threshold: number
            throughput threshold used by find()

        Returns
        -------
        list of [Anomaly, Degradation] pairs, one per input row
        """
        sample = df.copy()
        sample.index = range(len(sample))
        for i in range(len(sample)):
            if sample.iloc[i]['Anomaly'] == 1:
                # Fetch this UE's records from the last 20 seconds as the
                # "normal" baseline for comparison.
                query = """select * from {} where "{}" = \'{}\' and time<now() and time>now()-20s""".format(db.meas, db.ue, sample.iloc[i][db.ue])
                normal = db.query(query)
                if normal:
                    normal = normal[db.meas][[db.thpt, db.rsrp, db.rsrq]]
                    # Compare the sample against the recent per-UE maxima.
                    deg = self.find(sample.loc[i, :], normal.max(), db, threshold)
                    if deg:
                        sample.loc[i, 'Degradation'] = deg
                        # Level 2: both throughput and signal quality degraded.
                        if 'Throughput' in deg and ('RSRP' in deg or 'RSRQ' in deg):
                            sample.loc[i, 'Anomaly'] = 2
                    else:
                        # No parameter below threshold — reclassify as normal.
                        sample.loc[i, 'Anomaly'] = 0
        return sample[['Anomaly', 'Degradation']].values.tolist()

    def find(self, row, l, db, threshold):
        """ Store whether each parameter is below its threshold; return the
        degradation types joined by spaces, or False when none apply. """
        deg = []
        # Throughput degraded if below (100 - threshold)% of the recent maximum.
        if row[db.thpt] < l[db.thpt]*(100 - threshold)*0.01:
            deg.append('Throughput')
        # RSRP/RSRQ degraded if more than 15 / 10 below the recent maximum
        # (units presumably dB — TODO confirm against the KPI definitions).
        if row[db.rsrp] < l[db.rsrp]-15:
            deg.append('RSRP')
        if row[db.rsrq] < l[db.rsrq]-10:
            deg.append('RSRQ')
        if len(deg) == 0:
            deg = False
        else:
            deg = ' '.join(deg)
        return deg
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
import joblib
import time
import numpy as np
from processing import PREPROCESS
from sklearn.metrics import classification_report, f1_score
from sklearn.ensemble import IsolationForest
from sklearn.model_selection import RandomizedSearchCV
from mdclogpy import Logger
logger = Logger(name=__name__)
class ModelTraining(object):
    r""" Reads training/validation data from the database and trains an
    Isolation Forest model, persisting the best estimator to 'src/model'.

    Parameters
    ----------
    db: DATABASE
        database handle used to fetch training and validation data

    Attributes
    ----------
    actual: array
        actual label for test data
    train_data, test_data: DataFrame
        fetched and (after train()) transformed input data
    """

    def __init__(self, db):
        self.db = db
        self.train_data = None
        self.test_data = None
        self.read_train()
        self.read_test()

    def read_train(self):
        """Read the training dataset; blocks (retrying every 2 minutes) until
        the database yields at least 1000 non-null samples."""
        self.db.read_data(train=True)
        while self.db.data is None or len(self.db.data.dropna()) < 1000:
            logger.warning("Check if InfluxDB instance is up / Not sufficient data for Training")
            time.sleep(120)
            self.db.read_data(train=True)
        self.train_data = self.db.data
        logger.debug("Training on {} Samples".format(self.train_data.shape[0]))

    def read_test(self):
        """ Read test dataset for model validation; blocks (retrying every
        minute) until at least 300 non-null samples are available."""
        self.db.read_data(valid=True)
        while self.db.data is None or len(self.db.data.dropna()) < 300:
            logger.warning("Check if InfluxDB instance is up? or Not sufficient data for Validation in last 10 minutes")
            time.sleep(60)
            self.db.read_data(valid=True)
        self.test_data = self.db.data.dropna()
        logger.debug("Validation on {} Samples".format(self.test_data.shape[0]))

    def isoforest(self, outliers_fraction=0.05, random_state=4):
        """ Train isolation forest with a randomized hyperparameter search.

        Parameters
        ----------
        outliers_fraction: float between 0.01 to 0.5 (default=0.05)
            percentage of anomalous samples in input data.
            NOTE(review): currently unused — 'contamination' is drawn from the
            search grid below instead.
        random_state: int (default=4)

        Returns
        -------
        (f1, estimator): validation F1 score and the best fitted estimator
        """
        # Hyperparameter search space for RandomizedSearchCV.
        parameter = {'contamination': [of for of in np.arange(0.01, 0.5, 0.02)],
                     'n_estimators': [100*(i+1) for i in range(1, 10)],
                     'max_samples': [0.005, 0.01, 0.1, 0.15, 0.2, 0.3, 0.4]}
        # Single pseudo-split using all data for both folds (no real CV split).
        cv = [(slice(None), slice(None))]
        iso = IsolationForest(random_state=random_state, bootstrap=True, warm_start=False)
        model = RandomizedSearchCV(iso, parameter, scoring=self.validate, cv=cv, n_iter=50)
        md = model.fit(self.train_data.values)
        f1 = self.validate(md.best_estimator_, self.test_data, True)
        return f1, md.best_estimator_

    def validate(self, model, test_data, report=False):
        """Score a fitted model against the validation set; returns macro F1.

        NOTE(review): the test_data argument is ignored — predictions are
        always made on self.test_data.
        """
        pred = model.predict(self.test_data.values)
        if -1 in pred:
            # IsolationForest labels anomalies -1; map to 1 (anomalous) / 0 (normal).
            pred = [1 if p == -1 else 0 for p in pred]
        F1 = f1_score(self.actual, pred, average='macro')
        if report:
            logger.debug("classfication report : {} ".format(classification_report(self.actual, pred)))
            logger.debug("F1 score:{}".format(F1))
        return F1

    def train(self):
        """
        Main function to perform training on input data
        """
        logger.debug("Training Starts")
        # Fit preprocessing on the training data (also persists num_params/scale
        # artifacts — presumably inside PREPROCESS.process(); TODO confirm).
        ps = PREPROCESS(self.train_data)
        ps.process()
        self.train_data = ps.data
        # Ground-truth labels: any positive anomaly count counts as anomalous.
        self.actual = (self.test_data[self.db.anomaly] > 0).astype(int)
        num = joblib.load('src/num_params')
        ps = PREPROCESS(self.test_data[num])
        ps.transform()
        self.test_data = ps.data
        scores = []
        models = []
        logger.info("Training Isolation Forest")
        f1, model = self.isoforest()
        scores.append(f1)
        models.append(model)
        # Pick the best-scoring model (single candidate today; list kept so
        # more model types can be appended later).
        opt = scores.index(max(scores))
        joblib.dump(models[opt], 'src/model')
        logger.info("Optimum f-score : {}".format(scores[opt]))
        logger.info("Training Ends : ")
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
import time
import pandas as pd
from influxdb import DataFrameClient
from configparser import ConfigParser
from mdclogpy import Logger
from influxdb.exceptions import InfluxDBClientError, InfluxDBServerError
from requests.exceptions import RequestException, ConnectionError
logger = Logger(name=__name__)
class DATABASE(object):
    r""" DATABASE takes an input as database name. It creates a client connection
    to influxDB and it reads/writes UE data for a given database and a measurement.

    Parameters
    ----------
    dbname: str (default='Timeseries')
        database name to read from / write to
    user: str (default='root')
        user to connect
    password: str (default='root')
        password of the user
    host: str (default='r4-influxdb.ricplt')
        hostname to connect to InfluxDB
    port: int (default='8086')
        port to connect to InfluxDB
    path: str (default='')
        path of InfluxDB on the server
    ssl: bool (default=False)
        connect over https and verify the certificate when True

    Attributes
    ----------
    client: influxDB client
        DataFrameClient api to connect influxDB
    data: DataFrame
        fetched data from database
    """

    def __init__(self, dbname='Timeseries', user='root', password='root', host="r4-influxdb.ricplt", port='8086', path='', ssl=False):
        self.data = None
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.path = path
        self.ssl = ssl
        self.dbname = dbname
        self.client = None
        # Values above act as defaults; src/ad_config.ini overrides them.
        self.config()

    def connect(self):
        """Open (or re-open) a DataFrameClient connection and ping the server.

        Returns True on success; on failure logs the error, sleeps 2 minutes
        (to pace retries) and implicitly returns None.
        """
        if self.client is not None:
            self.client.close()
        try:
            self.client = DataFrameClient(self.host, port=self.port, username=self.user, password=self.password, path=self.path, ssl=self.ssl, database=self.dbname, verify_ssl=self.ssl)
            version = self.client.request('ping', expected_response_code=204).headers['X-Influxdb-Version']
            logger.info("Connected to Influx Database, InfluxDB version : {}".format(version))
            return True
        except (RequestException, InfluxDBClientError, InfluxDBServerError, ConnectionError):
            logger.error("Failed to establish a new connection with InfluxDB, Please check your url/hostname")
            time.sleep(120)

    def read_data(self, train=False, valid=False, limit=False):
        """Read data method for the configured measurement and an optional limit.

        Parameters
        ----------
        train: bool (default=False)
            fetch the 5m..75m-old window used for training
        valid: bool (default=False)
            fetch the last 5 minutes used for validation
        limit: int (default=False)
            fetch at most `limit` rows from the last minute

        The result (if any) is stored in self.data; otherwise self.data is None.
        """
        self.data = None
        query = 'select * from ' + self.meas
        if not train and not valid and not limit:
            # Default: live window of the most recent 1.6 seconds.
            query += ' where time>now()-1600ms'
        elif train:
            query += ' where time<now()-5m and time>now()-75m'
        elif valid:
            query += ' where time>now()-5m'
        elif limit:
            query += ' where time>now()-1m limit '+str(limit)
        result = self.query(query)
        if result and len(result[self.meas]) != 0:
            self.data = result[self.meas]

    def write_anomaly(self, df, meas='AD'):
        """Write data method for a given measurement.

        Parameters
        ----------
        df: DataFrame
            anomaly records to persist
        meas: str (default='AD')
            target measurement name
        """
        try:
            self.client.write_points(df, meas)
        except (RequestException, InfluxDBClientError, InfluxDBServerError) as e:
            logger.error('Failed to send metrics to influxdb')
            print(e)

    def query(self, query):
        """Run a raw query; returns the result dict, or False on any client/server error."""
        try:
            result = self.client.query(query)
        except (RequestException, InfluxDBClientError, InfluxDBServerError, ConnectionError) as e:
            logger.error('Failed to connect to influxdb: {}'.format(e))
            result = False
        return result

    def config(self):
        """Load connection settings and feature/column names from src/ad_config.ini."""
        cfg = ConfigParser()
        cfg.read('src/ad_config.ini')
        for section in cfg.sections():
            if section == 'influxdb':
                self.host = cfg.get(section, "host")
                self.port = cfg.get(section, "port")
                self.user = cfg.get(section, "user")
                self.password = cfg.get(section, "password")
                self.path = cfg.get(section, "path")
                # BUG FIX: cfg.get() returned the *string* "False", which is
                # truthy and silently enabled ssl/verify_ssl in connect().
                # Parse the ini value as a real boolean instead.
                self.ssl = cfg.getboolean(section, "ssl")
                self.dbname = cfg.get(section, "database")
                self.meas = cfg.get(section, "measurement")
            if section == 'features':
                self.thpt = cfg.get(section, "thpt")
                self.rsrp = cfg.get(section, "rsrp")
                self.rsrq = cfg.get(section, "rsrq")
                self.rssinr = cfg.get(section, "rssinr")
                self.prb = cfg.get(section, "prb_usage")
                self.ue = cfg.get(section, "ue")
                self.anomaly = cfg.get(section, "anomaly")
                self.a1_param = cfg.get(section, "a1_param")
class DUMMY(DATABASE):
    """Offline stand-in for DATABASE that serves UE records from the bundled
    CSV file instead of a live InfluxDB instance."""

    def __init__(self):
        super().__init__()
        # Static dataset shipped with the xApp, used when no InfluxDB is available.
        self.ue_data = pd.read_csv('src/ue.csv')

    def connect(self):
        """Always succeeds — there is no real connection to open."""
        return True

    def read_data(self, train=False, valid=False, limit=100000):
        """Serve up to `limit` rows; training reads exclude the anomaly label column."""
        subset = self.ue_data.head(limit)
        if train:
            subset = subset.drop(self.anomaly, axis=1)
        self.data = subset

    def write_anomaly(self, df, meas_name='AD'):
        """No-op: results are discarded in dummy mode."""
        pass

    def query(self, query=None):
        """Return a single-row result shaped like a real query response."""
        return {'UEReports': self.ue_data.head(1)}
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
class Error(Exception):
    """Common base type for this module's custom exceptions."""
    pass
class NoDataError(Exception):
    """Raised when there is no data available in the database for a given measurement.

    BUG FIX: previously derived from BaseException, so generic
    ``except Exception`` handlers (and the module's own ``Error`` base)
    never caught it; BaseException is reserved for system-exiting
    exceptions such as KeyboardInterrupt.
    """
    pass
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
"""
This Module is temporary for pushing data into influxdb before dpeloyment of AD xApp. It will depreciated in future, when data will be coming through KPIMON
"""
import datetime
import time
import pandas as pd
from database import DATABASE
from configparser import ConfigParser
class INSERTDATA(DATABASE):
    """Helper that pushes the bundled UE dataset into InfluxDB.

    Temporary utility used before the AD xApp is deployed; superseded once
    KPIMON supplies live data.
    """

    def __init__(self):
        super().__init__()
        self.config()
        self.connect()
        # Optional reset of the target database:
        # self.dropdb('RIC-Test')
        # self.createdb('RIC-Test')

    def config(self):
        """Read InfluxDB connection settings from ad_config.ini (cwd-relative)."""
        cfg = ConfigParser()
        cfg.read('ad_config.ini')
        # attribute name -> option name in the [influxdb] section
        opts = {'host': 'host', 'port': 'port', 'user': 'user',
                'password': 'password', 'path': 'path', 'ssl': 'ssl',
                'dbname': 'database', 'meas': 'measurement'}
        for section in cfg.sections():
            if section == 'influxdb':
                for attr, opt in opts.items():
                    setattr(self, attr, cfg.get(section, opt))

    def createdb(self, dbname):
        """Create ``dbname`` and make it the active database."""
        print("Create database: " + dbname)
        self.client.create_database(dbname)
        self.client.switch_database(dbname)

    def dropdb(self, dbname):
        """Delete ``dbname`` entirely."""
        print("DROP database: " + dbname)
        self.client.drop_database(dbname)

    def dropmeas(self, measname):
        """Delete a single measurement from the active database."""
        print("DROP MEASUREMENT: " + measname)
        self.client.query('DROP MEASUREMENT ' + measname)

    def assign_timestamp(self, df):
        """Re-stamp each measTimeStampRf group to 'now' and write it out.

        Each group gets fresh 1ms-spaced timestamps starting at the current
        wall clock; a 0.7s pause between groups paces the inserts.
        """
        for timestamp in df['measTimeStampRf'].unique():
            group = df[df['measTimeStampRf'] == timestamp]
            group.index = pd.date_range(start=datetime.datetime.now(), freq='1ms', periods=len(group))
            self.client.write_points(group, self.meas)
            time.sleep(0.7)
def populatedb():
    """Open a DB connection and stream the bundled ue.csv into it forever."""
    # Initiate connection (INSERTDATA.__init__ reads config and connects).
    db = INSERTDATA()
    ue_frame = pd.read_csv('ue.csv')
    # Intentional endless loop: keep replaying the dataset with fresh stamps.
    while True:
        db.assign_timestamp(ue_frame)
if __name__ == "__main__":
populatedb()
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
import json
import os
import time
import pandas as pd
import schedule
from ricxappframe.xapp_frame import Xapp, rmr
from ricxappframe.xapp_sdl import SDLWrapper
from mdclogpy import Logger
from ad_model import modelling, CAUSE
from ad_train import ModelTraining
from database import DATABASE, DUMMY
# Module-level state shared across the scheduler callbacks below.
db = None         # DATABASE or DUMMY instance; set by connectdb()
cp = None         # CAUSE instance; set by load_model()
threshold = None  # throughput degradation threshold (%); set by load_model()
sdl = SDLWrapper(use_fake_sdl=True)
logger = Logger(name=__name__)
def entry(self):
    """Entry point run inside the xApp framework.

    Connects to the database, triggers training if no model is stored on
    disk, loads the model, then runs predict() on a schedule forever.
    Note: predict is scheduled every 0.5 seconds (the original docstring
    said 10 ms, which does not match the code).
    """
    connectdb()
    train_model()
    load_model()
    schedule.every(0.5).seconds.do(predict, self)
    while True:
        schedule.run_pending()
def load_model():
    """Instantiate the model and cause analyser; set the default threshold.

    Populates the module globals ``md``, ``cp`` and ``threshold`` used by
    predict() and change_threshold().
    """
    global md, cp, threshold
    threshold = 70
    md = modelling()
    cp = CAUSE()
    logger.info("throughput threshold parameter is set as {}% (default)".format(threshold))
def train_model():
    """Train and persist the AD model unless one already exists on disk."""
    if os.path.isfile('src/model'):
        return
    trainer = ModelTraining(db)
    trainer.train()
def predict(self):
    """Read the latest UE samples from the database and detect anomalies.

    Sends the anomalous-sample info to Traffic Steering (rmr message type
    30003, via msg_to_ts) and lets msg_to_ts collect the acknowledgement.
    """
    # Pull the most recent window of samples (DATABASE.read_data defaults).
    db.read_data()
    val = None
    if db.data is not None:
        # Proceed only when all columns the model was trained on are present.
        if set(md.num).issubset(db.data.columns):
            db.data = db.data.dropna(axis=0)
            if len(db.data) > 0:
                val = predict_anomaly(self, db.data)
        else:
            logger.warning("Parameters does not match with of training data")
    else:
        # No rows returned; back off briefly before the next scheduled run.
        logger.warning("No data in last 1 second")
        time.sleep(1)
    # predict_anomaly returns JSON bytes or None; len > 2 filters out
    # an empty JSON array payload (b'[]').
    if (val is not None) and (len(val) > 2):
        msg_to_ts(self, val)
def predict_anomaly(self, df):
    """Label the given samples and report the anomalous ones.

    Runs the model on ``df``, refines flagged rows with cause analysis,
    writes the fully labelled frame to the AD measurement, and returns
    the anomalous-sample records as JSON bytes.

    Parameters
    ----------
    df : pandas.DataFrame
        UE samples to classify (mutated in place: Anomaly/Degradation
        columns are added and the index is re-stamped)

    Returns
    -------
    val : bytes or None
        JSON-encoded list of records (the ``db.ue`` column, 'time',
        'Degradation') for anomalous samples; None when nothing was flagged
    """
    df['Anomaly'] = md.predict(df)
    df.loc[:, 'Degradation'] = ''
    val = None
    if 1 in df.Anomaly.unique():
        # Refine labels and attach a degradation type via cause analysis.
        df.loc[:, ['Anomaly', 'Degradation']] = cp.cause(df, db, threshold)
        df_a = df.loc[df['Anomaly'] == 1].copy()
        if len(df_a) > 0:
            # Expose the timestamp index as an ordinary column for the payload.
            df_a['time'] = df_a.index
            cols = [db.ue, 'time', 'Degradation']
            # rmr send 30003(TS_ANOMALY_UPDATE), should trigger registered callback
            result = json.loads(df_a.loc[:, cols].to_json(orient='records'))
            val = json.dumps(result).encode()
    # Persist every labelled sample; re-index with fresh 1ms-spaced stamps.
    df.loc[:, 'RRU.PrbUsedDl'] = df['RRU.PrbUsedDl'].astype('float')
    df.index = pd.date_range(start=df.index[0], periods=len(df), freq='1ms')
    db.write_anomaly(df)
    return val
def msg_to_ts(self, val):
    """Send the anomaly payload to Traffic Steering and drain replies.

    Sends ``val`` as message type 30003 (TS_ANOMALY_UPDATE); on success,
    reads incoming messages, logging acks (30004) and dispatching A1
    policy requests (20010) to a1_request_handler.
    """
    # send message from ad to ts
    logger.debug("Sending Anomalous UE to TS")
    success = self.rmr_send(val, 30003)
    if success:
        logger.info(" Message to TS: message sent Successfully")
        # rmr receive to get the acknowledgement message from the traffic steering.
        for summary, sbuf in self.rmr_get_messages():
            if sbuf.contents.mtype == 30004:
                logger.info("Received acknowldgement from TS (TS_ANOMALY_ACK): {}".format(summary))
            if sbuf.contents.mtype == 20010:
                # NOTE(review): a1_request_handler also calls rmr_free(sbuf)
                # on its success path (L1136), and this loop frees it again
                # below — confirm this double free is safe/intended.
                a1_request_handler(self, summary, sbuf)
            self.rmr_free(sbuf)
def connectdb(thread=False):
    # thread=True selects the in-memory DUMMY data source (used by tests);
    # thread=False connects to the real InfluxDB via DATABASE.
    # NOTE(review): the original comment stated the opposite mapping.
    global db
    if thread:
        db = DUMMY()
    else:
        db = DATABASE()
    success = False
    # Busy-retry until connect() reports success (DUMMY always succeeds).
    while not success:
        success = db.connect()
def a1_request_handler(self, summary, sbuf):
    """Handle an inbound A1 policy message (rmr type 20010).

    Parses the JSON payload, verifies the mandatory policy fields, applies
    the threshold update, and sends the policy response (type 20011).
    Invalid or unparseable requests are logged and dropped.
    """
    logger.info("A1 policy received")
    try:
        req = json.loads(summary[rmr.RMR_MS_PAYLOAD])  # input should be a json encoded as bytes
        logger.debug("A1PolicyHandler.resp_handler:: Handler processing request")
    except (json.decoder.JSONDecodeError, KeyError):
        logger.error("A1PolicyManager.resp_handler:: Handler failed to parse request")
        return
    if verifyPolicy(req):
        logger.info("A1PolicyHandler.resp_handler:: Handler processed request: {}".format(req))
    else:
        logger.error("A1PolicyHandler.resp_handler:: Request verification failed: {}".format(req))
        # BUG FIX: previously fell through and processed the invalid request.
        return
    logger.debug("A1PolicyHandler.resp_handler:: Request verification success: {}".format(req))
    change_threshold(self, req)
    resp = buildPolicyResp(self, req)
    self.rmr_send(json.dumps(resp).encode(), 20011)
    logger.info("A1PolicyHandler.resp_handler:: Response sent: {}".format(resp))
    # NOTE(review): the caller (msg_to_ts loop) also frees sbuf — confirm
    # this extra free is intended before removing it.
    self.rmr_free(sbuf)
def change_threshold(self, req: dict):
    """Update the module-level throughput threshold from an A1 CREATE request.

    Parameters
    ----------
    req : dict
        parsed A1 policy request; its 'payload' is a JSON string keyed by
        db.a1_param.
    """
    # BUG FIX: without the global declaration, the assignment below created
    # a local variable and the module-level threshold used by predict()
    # was never actually updated.
    global threshold
    if req["operation"] == "CREATE":
        payload = req["payload"]
        threshold = json.loads(payload)[db.a1_param]
        logger.info("throughput threshold parameter updated to: {}% ".format(threshold))
def verifyPolicy(req: dict):
    """Return True when the request carries all mandatory A1 policy fields."""
    required = ("policy_type_id", "operation", "policy_instance_id")
    return all(field in req for field in required)
def buildPolicyResp(self, req: dict):
    """Turn a verified A1 request into the acknowledgement response.

    Mutates ``req`` in place: stamps the handler id and status, strips the
    request-only fields, and returns the same dict.
    """
    req["handler_id"] = "ad"
    req.pop("operation")
    req.pop("payload")
    req["status"] = "OK"
    return req
def start(thread=False):
    # Initiates xapp api and runs the entry() using xapp.run()
    # NOTE(review): 'thread' is accepted but unused here — entry() calls
    # connectdb() with its default, so the real DATABASE path is taken.
    # Also, use_fake_sdl=False here while the module-level SDLWrapper above
    # is created with use_fake_sdl=True — confirm which is intended.
    xapp = Xapp(entrypoint=entry, rmr_port=4560, use_fake_sdl=False)
    logger.debug("AD xApp starting")
    xapp.run()
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
import pandas as pd
import numpy as np
import joblib
from sklearn.preprocessing import Normalizer
class PREPROCESS(object):
    r"""Apply the preprocessing pipeline to raw UE data.

    Parameters
    ----------
    data : pandas.DataFrame
        raw input dataset to process

    Attributes
    ----------
    data : pandas.DataFrame
        DataFrame holding the (progressively) processed data
    """

    def __init__(self, data):
        """Store the input frame and normalise throughput units.

        Columns not useful for prediction (id/geo/label columns) are
        dropped later, in process().
        """
        self.data = data
        self.convert_gb_to_mb()

    def variation(self):
        """Drop constant-valued columns (only meaningful with >1 row)."""
        if len(self.data) > 1:
            self.data = self.data.loc[:, self.data.apply(pd.Series.nunique) != 1]

    def convert_gb_to_mb(self):
        """Convert DRB.UEThpDl from GB to MB (multiply by 1024).

        BUG FIX: the original used chained indexing
        (``self.data.iloc[:]['DRB.UEThpDl'] = ...``), which assigns through
        a possible intermediate copy and may silently leave the frame
        unchanged; direct column assignment is reliable.
        """
        self.data['DRB.UEThpDl'] = self.data['DRB.UEThpDl'] * 1024

    def numerical_data(self):
        """Keep only numeric-typed columns."""
        numerics = ['int16', 'int32', 'int64', 'float16', 'float32', 'float64']
        self.data = self.data.select_dtypes(include=numerics)

    def drop_na(self):
        """Drop observations (rows) containing NaN values."""
        self.data = self.data.dropna(axis=0)

    def correlation(self):
        """Drop one column of every highly correlated pair (|r| > 0.98)."""
        corr = self.data.corr().abs()
        # Lower triangle only, so each pair is considered exactly once.
        corr = pd.DataFrame(np.tril(corr, k=-1), columns=self.data.columns)
        drop = [column for column in corr.columns if any(corr[column] > 0.98)]
        self.data = self.data.drop(drop, axis=1)

    # check skewness of all parameters and use log transform if half of
    # parameters are skewed enough; otherwise use standardization
    def fit_transform(self):
        """Fit a Normalizer on the data and persist it to src/scale."""
        scale = Normalizer().fit(self.data)
        joblib.dump(scale, 'src/scale')

    def transform(self):
        """Apply the persisted Normalizer (src/scale) to the data."""
        scale = joblib.load('src/scale')
        self.data = pd.DataFrame(scale.transform(self.data), columns=self.data.columns)

    def save_cols(self):
        """Persist the final column list to src/num_params."""
        joblib.dump(self.data.columns, 'src/num_params')

    def process(self):
        """Run the full pipeline: drop id/label columns, filter, scale, save."""
        # Columns irrelevant for prediction: neighbour-cell ('nb'),
        # geography ('Geo'), and label columns ('anomal', 'target').
        temp = [col for col in self.data.columns
                if 'nb' in col or 'Geo' in col or 'anomal' in col or 'target' in col]
        self.data = self.data.drop(temp, axis=1)
        self.numerical_data()
        self.drop_na()
        self.variation()
        self.correlation()
        self.fit_transform()
        self.transform()
        self.save_cols()
This diff is collapsed.
# in __init__.py
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
import pytest
import pandas as pd
@pytest.fixture
def ad_to_ts():
    """JSON payload as sent from AD to TS for one anomalous UE."""
    return '[{"du-id": 1006, "ue-id": "Car-1", "measTimeStampRf": 1620832626630, "Degradation": "RSRP"}]'
@pytest.fixture
def ad_ue():
    """Single-row UE measurement frame shaped like a live report sample."""
    columns = ["du-id", "ServingCellId", "RRU.PrbUsedDl", "RF.serving.RSRP",
               "RF.serving.RSRQ", "RF.serving.RSSINR", "TargetTput",
               "DRB.UEThpDl", "ue-id", "x", "y", "measTimeStampRf"]
    row = [1002, "c2/B13", 8, 69, 65, 113, 0.1, 0.1, "Waiting passenger 9",
           -882, -959, pd.to_datetime("2021-05-12T07:43:51.652")]
    return pd.DataFrame([row], columns=columns)
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
newrt|start
rte|30003|service-ricxapp-trafficxapp-rmr.ricxapp.svc.cluster.local:4560
newrt|end
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
newrt|start
rte|30003|127.0.0.1:4560
newrt|end
# ==================================================================================
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
import json
from src import main
from ricxappframe.xapp_frame import Xapp
from contextlib import suppress
def test_database_connection(monkeypatch):
    # start ad
    # thread=True selects the DUMMY (csv-backed) database, so no live
    # InfluxDB instance is needed for the test run.
    main.connectdb(thread=True)
def test_trainModel(monkeypatch):
    # Trains and persists a model only when src/model does not already exist.
    main.train_model()
def test_predict_anomaly(monkeypatch, ad_ue):
    # Load model artefacts, then run detection on the single-row ad_ue fixture.
    main.load_model()
    main.predict_anomaly(monkeypatch, ad_ue)
def test_msg_to_ts(monkeypatch, ad_to_ts):
    # Stand up a minimal xApp whose entry just sends the AD payload to TS.
    def mock_ad_entry(self):
        val = json.dumps(ad_to_ts).encode()
        self.rmr_send(val, 30003)
    # Kept global so teardown_module() can stop it.
    global mock_ad_xapp
    mock_ad_xapp = Xapp(entrypoint=mock_ad_entry, rmr_port=4564, use_fake_sdl=True)
    mock_ad_xapp.run()  # this will return since mock_ad_entry isn't a loop
def teardown_module():
    """Pytest module teardown (the name is pytest magic): stop the mock xApp.

    Acts like a 'finally' for this module — if an exception is raised before
    stop is called in a test above, pytest would otherwise hang forever, so
    any error from stopping (including the xApp never having started) is
    swallowed here.
    """
    try:
        mock_ad_xapp.stop()
    except Exception:
        pass
# Copyright (c) 2020 HCL Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================
[tox]
envlist = code,flake8,docs,docs-linkcheck
minversion = 2.0
[testenv:code]
basepython = python3
deps=
pytest
coverage
pytest-cov
setenv =
LD_LIBRARY_PATH = /usr/local/lib/:/usr/local/lib64
PYTHONPATH = {toxinidir}:src:/usr/lib/python3.7/site-packages/
RMR_SEED_RT = tests/fixtures/test_local.rt
RMR_ASYNC_CONN = 0
USE_FAKE_SDL = 1
commands =
pytest -v --cov src --cov-report xml --cov-report term-missing --cov-report html --cov-fail-under=50
coverage xml -i
[testenv:flake8]
basepython = python3
skip_install = true
deps = flake8
setenv =
PYTHONPATH = {toxinidir}:src:/usr/lib/python3.7/site-packages/
commands = flake8 setup.py src tests
[flake8]
extend-ignore = E501,E741,E731
[testenv:clm]
# use pip to gather dependencies with versions for CLM analysis
whitelist_externals = sh
commands = sh -c 'pip freeze > requirements.txt'
# doc jobs
[testenv:docs]
whitelist_externals = echo
skipsdist = true
basepython = python3
deps =
sphinx
sphinx-rtd-theme
sphinxcontrib-httpdomain
recommonmark
lfdocs-conf
urllib3~=1.26.15
allowlist_externals = echo
commands =
sphinx-build -W -b html -n -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/html
echo "Generated docs available in {toxinidir}/docs/_build/html"
[testenv:docs-linkcheck]
skipsdist = true
basepython = python3
deps = sphinx
sphinx-rtd-theme
sphinxcontrib-httpdomain
recommonmark
lfdocs-conf
urllib3~=1.26.15
commands = sphinx-build -W -b linkcheck -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/linkcheck
{
"xapp_name": "ad",
"version": "1.0.1",
"containers": [
{
"name": "ad",
"image": {
"registry": "harbor.prlab.io",
"name": "study_group/ric-app-ad",
"tag": "1.0.0"
}
}
],
"messaging": {
"ports": [
{
"name": "rmr-data",
"container": "ad",
"port": 4560,
"txMessages": ["TS_ANOMALY_UPDATE"],
"rxMessages": ["TS_ANOMALY_ACK"],
"policies": [],
"description": "rmr receive data port for ad"
},
{
"name": "rmr-route",
"container": "ad",
"port": 4561,
"description": "rmr route port for ad"
}
]
},
"rmr": {
"protPort": "tcp:4560",
"maxSize": 2072,
"numWorkers": 1,
"rxMessages": ["TS_ANOMALY_ACK"],
"txMessages": ["TS_ANOMALY_UPDATE"],
"policies": []
}
}
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "#/controls",
"type": "object",
"title": "Controls Section Schema",
"required": [
],
"properties": {
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment