main
Sergey Revyakin 1 month ago
commit ddb2202429

@ -0,0 +1,17 @@
.git
.vscode
.venv-sdr
__pycache__/
*.pyc
*.pyo
# Heavy host-only SDR sources
gnuradio/
gr-osmosdr/
gr-osmosdr-0.2.6/
# Local runtime artifacts
NN_server/result/
# Legacy install artifacts not needed in docker image
install_scripts/

184
.gitignore vendored

@ -0,0 +1,184 @@
# ---> JupyterNotebooks
# gitignore template for Jupyter Notebooks
# website: http://jupyter.org/
NN_server/NN/
.env
.ipynb_checkpoints
*/.ipynb_checkpoints/*
volk/
torchsig/
gnuradio/
gr_osmosdr/
gr_osmosdr-0.2.6/
/.vscode
/docs
# IPython
profile_default/
ipython_config.py
# Remove previous ipynb_checkpoints
# git rm -r .ipynb_checkpoints/
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"

@ -0,0 +1,70 @@
# This file must be used with "source bin/activate" *from bash*
# You cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
# on Windows, a path can contain colons and backslashes and has to be converted:
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
# transform D:\path\to\venv to /d/path/to/venv on MSYS
# and to /cygdrive/d/path/to/venv on Cygwin
export VIRTUAL_ENV=$(cygpath /home/sibscience-4/from_ssh/DroneDetector/.venv-sdr)
else
# use the path as-is
export VIRTUAL_ENV=/home/sibscience-4/from_ssh/DroneDetector/.venv-sdr
fi
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/"bin":$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1='(.venv-sdr) '"${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT='(.venv-sdr) '
export VIRTUAL_ENV_PROMPT
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null

@ -0,0 +1,27 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV /home/sibscience-4/from_ssh/DroneDetector/.venv-sdr
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = '(.venv-sdr) '"$prompt"
setenv VIRTUAL_ENV_PROMPT '(.venv-sdr) '
endif
alias pydoc python -m pydoc
rehash

@ -0,0 +1,69 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/). You cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
set -e _OLD_FISH_PROMPT_OVERRIDE
# prevents error when using nested fish instances (Issue #93858)
if functions -q _old_fish_prompt
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
end
set -e VIRTUAL_ENV
set -e VIRTUAL_ENV_PROMPT
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV /home/sibscience-4/from_ssh/DroneDetector/.venv-sdr
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status
# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) '(.venv-sdr) ' (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
set -gx VIRTUAL_ENV_PROMPT '(.venv-sdr) '
end

@ -0,0 +1,6 @@
#!/home/sibscience-4/from_ssh/DroneDetector/.venv-sdr/bin/python3
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
sys.argv[0] = sys.argv[0].removesuffix('.exe')
sys.exit(main())

@ -0,0 +1,6 @@
#!/home/sibscience-4/from_ssh/DroneDetector/.venv-sdr/bin/python3
import sys
from charset_normalizer.cli import cli_detect
if __name__ == '__main__':
sys.argv[0] = sys.argv[0].removesuffix('.exe')
sys.exit(cli_detect())

@ -0,0 +1,6 @@
#!/home/sibscience-4/from_ssh/DroneDetector/.venv-sdr/bin/python3
import sys
from numpy._configtool import main
if __name__ == '__main__':
sys.argv[0] = sys.argv[0].removesuffix('.exe')
sys.exit(main())

@ -0,0 +1,8 @@
#!/home/sibscience-4/from_ssh/DroneDetector/.venv-sdr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -0,0 +1,8 @@
#!/home/sibscience-4/from_ssh/DroneDetector/.venv-sdr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -0,0 +1,8 @@
#!/home/sibscience-4/from_ssh/DroneDetector/.venv-sdr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -0,0 +1 @@
/usr/bin/python3

@ -0,0 +1,5 @@
home = /usr/bin
include-system-site-packages = true
version = 3.12.3
executable = /usr/bin/python3.12
command = /usr/bin/python3 -m venv --system-site-packages /home/sibscience-4/from_ssh/DroneDetector/.venv-sdr

@ -0,0 +1,272 @@
from tqdm import tqdm
import numpy as np
import random
import os
import re
import gc
class Model(object):
    """Generic wrapper around one inference model (build / preprocess /
    infer / postprocess are injected as callables).

    Class-level state is shared by ALL Model instances: a model counter,
    a global inference index, and a result table keyed by model type.
    Every method swallows exceptions and prints them instead of raising —
    failures surface as None return values, not errors.
    """
    # Shared, class-wide state (intentionally not per-instance).
    _model_id = 0          # number of models constructed so far; source of ids
    _ind_inference = 1     # global inference index shared across all models
    _result_list = dict()  # {type_model: {ind_inference: [prediction, probability]}}
    @staticmethod
    def get_model_id():
        # Return the class-wide model counter.
        try:
            return Model._model_id
        except Exception as exc:
            print(str(exc))
    @staticmethod
    def _get_inc_model_id():
        # Increment the class-wide model counter and return the new value.
        try:
            Model._model_id += 1
            return Model._model_id
        except Exception as exc:
            print(str(exc))
    @staticmethod
    def get_ind_inference():
        # Return the current global inference index.
        try:
            return Model._ind_inference
        except Exception as exc:
            print(str(exc))
    @staticmethod
    def get_inc_ind_inference():
        # Increment the global inference index (returns nothing).
        try:
            Model._ind_inference += 1
        except Exception as exc:
            print(str(exc))
    @staticmethod
    def _init_result_list(type_model=''):
        # Create an empty per-model-type slot in the shared result table.
        try:
            Model._result_list[type_model] = {}
        except Exception as exc:
            print(str(exc))
    @staticmethod
    def get_result_list():
        """Pretty-print the shared result table as an ASCII grid.

        Rows are inference indices, columns are model types; each cell shows
        '<prediction> <probability>'. Assumes every model type has an entry
        for every inference index (KeyError otherwise — NOTE(review): not
        guarded beyond the outer try/except).
        """
        try:
            def get_max(dict_inf=None):
                # Longest prediction label recorded for one model type.
                try:
                    return max([(len(i[0]) if len(i) else 0) for i in dict_inf.values()])
                except Exception as error:
                    print(str(error))
                    return 0
            max_length_label = max([get_max(i) for i in Model._result_list.values()])
            # Number of inference rows, taken from the FIRST model type only.
            num_inf = max(list(map(int, list(Model._result_list.values())[0].keys())))
            max_length_type_model = max([len(i) for i in Model._result_list.keys()])
            # Total table width used for the top/bottom rules.
            num_gaps = (max_length_type_model + 4) * (len(Model._result_list) + 1) + 2 * (len(Model._result_list) + 2)
            print('_' * num_gaps)
            # Header row: one centered column per model type.
            print('||' + ' ' * (max_length_type_model + 4) + '|', end='')
            for type_model in Model._result_list.keys():
                print('|' + ' ' * ((max_length_type_model - len(type_model)) // 2 + 2), end='')
                print(type_model, end='')
                print(' ' * ((max_length_type_model - len(type_model)) // 2 + 2) + '|', end='')
            print('|')
            # One body row per inference index.
            for ind_inf in range(1, num_inf+1):
                print('||' + ' ' * ((max_length_type_model - len(str(ind_inf))) // 2 + 2), end='')
                print(str(ind_inf) if len(str(ind_inf)) % 2 == 0 else str(ind_inf) + ' ', end='')
                print(' ' * ((max_length_type_model - len(str(ind_inf))) // 2 + 2) + '|', end='')
                for info_inference in Model._result_list.values():
                    if len(info_inference[ind_inf]) != 0:
                        # Center 'label' then append the probability, padding
                        # 3-character probabilities (e.g. '0.9') for alignment.
                        length_gap_left = (max_length_label - len(info_inference[ind_inf][0])) // 2 + (max_length_label - len(info_inference[ind_inf][0])) % 2
                        length_gap_right = (max_length_label - len(info_inference[ind_inf][0])) // 2 + 1
                        to_print = (' ' * length_gap_left + info_inference[ind_inf][0] + ' ' * length_gap_right) + (str(info_inference[ind_inf][1])
                                   if len(str(info_inference[ind_inf][1])) != 3 else str(info_inference[ind_inf][1]) + ' ')
                    else:
                        to_print = ' ' * max_length_type_model
                    print('|' + ' ' * ((max_length_type_model - len(to_print)) // 2 + 2), end='')
                    print(to_print, end='')
                    print(' ' * ((max_length_type_model - len(to_print)) // 2 + 2) + '|', end='')
                print('|')
            print('_' * num_gaps)
        except Exception as exc:
            print(str(exc))
    @staticmethod
    def _add_in_result_list(type_model='', ind_inference=0, list_to_add=None):
        # Record [prediction, probability] for one (model type, inference) pair.
        try:
            Model._result_list[type_model][ind_inference] = list_to_add
        except Exception as exc:
            print(str(exc))
    def __init__(self, file_model='', file_config='', src_example='', src_result='', type_model='',
                 build_model_func=None, pre_func=None, inference_func=None, post_func=None, classes=None,
                 number_synthetic_examples=0, number_src_data_for_one_synthetic_example=0, path_to_src_dataset=''):
        """Store the injected callables/paths, build the model, and register
        this model type in the shared result table.

        classes: mapping of output index -> class-name string.
        build_model_func / pre_func / inference_func / post_func: stage
        callables with the signatures used below (see _build_model etc.).
        """
        try:
            self._file_model = file_model
            self._file_config = file_config
            self._src_example = src_example
            self._src_result = src_result
            self._type_model = type_model
            self._build_model_func = build_model_func
            self._pre_func = pre_func
            self._inference_func = inference_func
            self._post_func = post_func
            self._classes = classes
            self._num_outputs = len(self._classes.keys())
            self._number_synthetic_examples = number_synthetic_examples
            self._number_src_data_for_one_synthetic_example = number_src_data_for_one_synthetic_example
            self._path_to_src_dataset = path_to_src_dataset
            self._data = None
            # Log prefix ("Model <id> of type <type>"); reads the CLASS counter
            # before it is incremented below.
            self._shablon = ' Модель ' + str(self._model_id+1) + ' с типом ' + str(self._type_model)
            self._model = self._build_model()
            # This assignment shadows the class attribute with an instance id.
            self._model_id = Model._get_inc_model_id()
            self._init_result_list(type_model=self._type_model)
        except Exception as exc:
            print(str(exc))
    def __str__(self):
        # Human-readable status: whether _build_model produced a model.
        try:
            if self._model is None:
                return self._shablon + ' не работает!' + '\n'
            else:
                return self._shablon + ' работает!' + '\n'
        except Exception as exc:
            print(str(exc))
    def get_mapping(self):
        # Class names in output-index order.
        return list(self._classes.values())
    def get_model_name(self):
        return self._type_model
    def get_shablon(self):
        # The log-message prefix for this model.
        return self._shablon
    def get_model(self):
        return self._model
    def _build_model(self):
        # Delegate construction to the injected builder; None on failure.
        try:
            print('Инициализация' + self._shablon)
            return self._build_model_func(file_model=self._file_model, file_config=self._file_config,
                                          num_classes=len(self._classes))
        except Exception as exc:
            print(str(exc))
    def _prepare_data(self, data=None):
        # Run the injected preprocessing step; result cached in self._data.
        try:
            print('Подготовка данных' + self._shablon)
            self._data = self._pre_func(data, src=self._src_result, ind_inference=Model.get_ind_inference())
        except Exception as exc:
            print(str(exc))
    def _post_data(self, prediction=None):
        # Run the injected postprocessing step.
        # NOTE(review): `self._ind_inference += 1` creates/updates an INSTANCE
        # attribute shadowing the class counter; Model._ind_inference itself is
        # not advanced here (get_inc_ind_inference would do that) — confirm
        # which counter is intended.
        print('Постобработка данных' + self._shablon)
        self._ind_inference += 1
        self._post_func(src=self._src_result, data=self._data, model_id=self._model_id, model_type=self._type_model,
                        ind_inference=Model.get_ind_inference(), prediction=prediction)
    def get_test_inference(self):
        # Public entry point for the self-test pass over synthetic examples.
        try:
            self._test_inference()
        except Exception as exc:
            print(str(exc))
    def _create_synthetic_examples(self):
        """Generate synthetic test inputs by averaging several random source
        files of one randomly chosen class, saved as .npy next to the label.
        """
        try:
            print('#' * 100)
            print('Создание синтетических примеров: ' + self._shablon)
            path_to_example_directory = os.path.join(self._src_example, self._type_model)
            os.mkdir(path_to_example_directory)
            for ind in tqdm(range(self._number_synthetic_examples)):
                try:
                    label = self._classes[random.randint(0, self._num_outputs-1)]
                    path_to_src_directory = os.path.join(self._path_to_src_dataset, label)
                    # First file only determines the array shape for the accumulator.
                    with open(path_to_src_directory + '/' + os.listdir(path_to_src_directory)[0], 'rb') as data_file:
                        data = np.frombuffer(data_file.read(), dtype=np.float32)
                    array_example = np.zeros(np.shape(data))
                    for _ in range(self._number_src_data_for_one_synthetic_example):
                        with open(path_to_src_directory + '/' + random.choice(os.listdir(path_to_src_directory)), 'rb') as data_file:
                            data = np.frombuffer(data_file.read(), dtype=np.float32)
                        array_example = np.add(array_example, data)
                    # Store the mean of the sampled source signals.
                    np.save(path_to_example_directory + '/' + label + '_' + str(ind+1), array_example / self._number_src_data_for_one_synthetic_example)
                except Exception as exc:
                    print(str(exc))
            print('Создание синтетических примеров завершено!')
            print()
        except Exception as exc:
            print(str(exc))
            print()
    def _test_inference(self):
        """Run the model over the generated synthetic examples and report the
        percentage of correct predictions (ground truth is parsed from the
        file name, which embeds the class label).
        """
        try:
            self._create_synthetic_examples()
            count_access = 1
            count_attempt = 1
            path_to_example = os.path.join(self._src_example, self._type_model)
            _, _, files = next(os.walk(path_to_example))
            if files:
                ind_inference = 0
                for file in files:
                    with open(path_to_example + '/' + file, 'rb') as data_file:
                        self._data = np.frombuffer(data_file.read(), dtype=np.float32)
                    print()
                    self._prepare_data(data=self._data)
                    print('Тестовый инференс' + self._shablon + ' попытка ' + str(count_attempt))
                    prediction, probability = self._inference_func(data=self._data, model=self._model, mapping=self._classes,
                                                                   shablon=self._shablon)
                    # Recover the true label from the file name tokens.
                    for value in self._classes.values():
                        if value in re.split('[._/]', file):
                            if value == prediction:
                                print('Тест ' + str(count_attempt) + ' пройден!\n')
                                count_access += 1
                            else:
                                print('Тест ' + str(count_attempt) + ' провален!\n')
                            count_attempt += 1
                            break
                    print()
                    print('Постобработка данных' + self._shablon)
                    ind_inference += 1
                    self._post_func(src=path_to_example+'/', data=self._data, ind_inference=ind_inference, model_id=self._model_id, model_type=self._type_model, prediction=prediction)
                # NOTE(review): divides by (count_attempt - 1) — ZeroDivisionError
                # if no file name ever matched a class label (caught by except).
                print('\nТестовый инференс' + self._shablon + ' пройден с результатом ' + str(100 * (count_access - 1) / (count_attempt - 1)) + ' %')
                print('#' * 100)
                print()
            else:
                print('\nНет данных для тестового инференса')
                print()
        except Exception as exc:
            print(str(exc))
    def get_inference(self, data=None):
        # Public entry point for one inference pass; None on failure.
        try:
            return self._inference(data=data)
        except Exception as exc:
            print(str(exc))
            return None
    def _inference(self, data=None):
        """Preprocess, infer, record the result in the shared table, and
        postprocess. Returns (prediction, probability) or None on failure.
        """
        try:
            # Pre-register an empty slot so the table shows a gap on failure.
            Model._add_in_result_list(type_model=self._type_model, ind_inference=self.get_ind_inference(), list_to_add=[])
            self._prepare_data(data=data)
            print('Инференс' + self._shablon)
            prediction, probability = self._inference_func(data=self._data, model=self._model, mapping=self._classes,
                                                           shablon=self._shablon)
            Model._add_in_result_list(type_model=self._type_model, ind_inference=self.get_ind_inference(), list_to_add=[prediction, probability])
            self._post_data(prediction=prediction)
            gc.collect()
            return prediction, probability
        except Exception as exc:
            print(str(exc))
            return None

@ -0,0 +1,41 @@
import torch
import torch.nn as nn
from torch.nn import functional as F
from torchvision import datasets, transforms
from torchensemble import VotingClassifier
class Ensemble(nn.Module):
    """Simple MLP classifier head: 2048 -> 512 -> 128 -> 32 -> 3 class logits.

    Used as the base estimator for a torchensemble VotingClassifier; the
    3-dim output feeds nn.CrossEntropyLoss (raw logits, no softmax here).
    """
    def __init__(self):
        super(Ensemble, self).__init__()
        self.linear1 = nn.Linear(2048, 512)
        self.linear2 = nn.Linear(512, 128)
        self.linear3 = nn.Linear(128, 32)
        self.linear4 = nn.Linear(32, 3)

    def forward(self, data):
        """Flatten the input to (batch, features) and return 3 class logits."""
        data = data.view(data.size(0), -1)
        output = F.relu(self.linear1(data))
        output = F.relu(self.linear2(output))
        # Bug fix: the original returned the 32-dim activations of linear3 and
        # never applied linear4, so the network emitted 32 values instead of
        # the intended 3 class logits.
        output = F.relu(self.linear3(output))
        output = self.linear4(output)
        return output
# Input normalization; (0.1307, 0.3081) are the well-known MNIST mean/std.
transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.1307,), (0.3081,))
])
# NOTE(review): torchvision.datasets.VisionDataset is an abstract base class —
# it takes no train=/download= arguments and loads nothing, so these two lines
# raise at runtime. A concrete dataset (e.g. datasets.MNIST, matching the
# normalization stats above) was probably intended, but Ensemble.linear1
# expects 2048 flattened features while MNIST yields 28*28=784 — confirm the
# intended dataset before fixing.
train = datasets.VisionDataset('../Dataset', train=True, download=True, transform=transform)
test = datasets.VisionDataset('../Dataset', train=False, transform=transform)
train_loader = torch.utils.data.DataLoader(train, batch_size=128, shuffle=True)
test_loader = torch.utils.data.DataLoader(test, batch_size=128, shuffle=True)
# Bagging-style voting ensemble of 10 independently trained Ensemble copies.
model = VotingClassifier(estimator=Ensemble, n_estimators=10, cuda=True)
criterion = nn.CrossEntropyLoss()
model.set_criterion(criterion)
model.set_optimizer('Adam', lr=1e-3, weight_decay=5e-4)
model.fit(train_loader, epochs=50, test_loader=test_loader)

@ -0,0 +1,192 @@
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import matplotlib
import mlconfig
import torch
import cv2
import gc
import io
def _render_signal_to_gray(signal, figsize, dpi):
    """Plot one 1-D signal as a black line on a fixed (-1, 1) y-axis and
    return the rendered figure as a 2-D grayscale uint8 image array.
    """
    fig = plt.figure(figsize=figsize)
    plt.axes(ylim=(-1, 1))
    plt.plot(signal, color='black')
    # Strip axes/margins so the image is only the waveform.
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
    plt.margins(0, 0)
    # Render to an in-memory PNG and decode it back as an image.
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=dpi)
    buf.seek(0)
    img_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
    buf.close()
    img = cv2.imdecode(img_arr, 1)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    plt.clf()
    plt.cla()
    plt.close()
    return img


def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Preprocess an IQ signal for the ResNet18 model.

    data is indexed as data[0] (real part) and data[1] (imaginary part); each
    channel is rendered to a grayscale waveform image. Returns a float32 array
    of shape (2, H, W), or None on any failure (exception is printed).
    src and ind_inference are unused here but kept for hook compatibility.
    """
    try:
        matplotlib.use('Agg')  # headless backend; safe without a display
        plt.ioff()
        figsize = (16, 8)
        dpi = 32
        # The original duplicated the whole render pipeline for the real and
        # imaginary channels; both now share one helper.
        img = np.asarray([_render_signal_to_gray(data[0], figsize, dpi),
                          _render_signal_to_gray(data[1], figsize, dpi)], dtype=np.float32)
        cv2.destroyAllWindows()
        gc.collect()
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Build a ResNet18-based classifier and load its weights.

    file_model: path to a saved state_dict (torch.save output).
    file_config: path to an mlconfig config whose model.architecture is a
        dotted factory path (e.g. 'torchvision.models.resnet18') —
        assumed from the rsplit/import usage below; TODO confirm schema.
    num_classes: output size for the replaced fully-connected head.
    Returns the model in eval() mode, or None on any failure (printed).
    """
    try:
        matplotlib.use('Agg')  # headless backend; no display required
        plt.ioff()
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Resolve 'pkg.module.factory' -> import pkg.module, call factory().
        model = getattr(import_module(config.model.architecture.rsplit('.', maxsplit=1)[0]),
                        config.model.architecture.rsplit('.', maxsplit=1)[1])()
        # Adapt the stem to 2-channel input (real/imag waveform images) by
        # prepending a 2->3 channel conv before the original conv1.
        model.conv1 = torch.nn.Sequential(torch.nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                                          padding=(3, 3), bias=False), model.conv1)
        model.fc = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        # map_location keeps CPU-only hosts working with GPU-saved weights.
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        cv2.destroyAllWindows()
        gc.collect()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass and return [predicted_label, confidence].

    data: preprocessed 2-channel image array; model: torch module in eval
    mode; mapping: output-index -> class-name lookup; shablon: log prefix.
    Confidence is the softmax probability of the winning class, rounded to
    two decimals. Returns None if anything raises (exception is printed).
    """
    try:
        matplotlib.use('Agg')
        plt.ioff()
        torch.cuda.empty_cache()
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        print(device)
        # Add a batch dimension and move the input next to the model.
        batch = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            logits = model(batch)
        _, top_idx = torch.max(logits.data, 1)
        prediction = mapping[int(np.asarray(top_idx.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        cpu_logits = logits.cpu()
        winner = np.asarray(np.argmax(cpu_logits, axis=1))[0]
        scores = np.asarray(torch.squeeze(cpu_logits, 0))
        # Numerically stable softmax over the raw logits.
        shifted = np.exp(scores - np.max(scores))
        probability = round((shifted / shifted.sum())[winner], 2)
        cv2.destroyAllWindows()
        gc.collect()
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Post-inference cleanup hook: close any open matplotlib/OpenCV state.

    The PNG dumping of the input planes that used to live here was disabled;
    it had been kept as a no-op triple-quoted string literal (dead code) and
    has been removed. Returns None; errors are printed and swallowed.
    """
    try:
        matplotlib.use('Agg')
        plt.ioff()
        plt.clf()
        plt.cla()
        plt.close()
        cv2.destroyAllWindows()
        gc.collect()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,132 @@
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src ='', ind_inference=0):
    """Render data[0] (real part) and data[1] (imag part) as axis-free
    grayscale line plots and stack them into a (2, H, W) float32 array.

    `src` and `ind_inference` are unused here; kept for hook-interface parity.
    Returns the stacked array, or None on failure (the error is printed).
    """
    try:
        figsize = (16, 8)
        dpi = 80  # 80 dpi * (16, 8) inches -> 1280x640 px canvas
        fig1 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))  # fixed range; assumes signal normalized to [-1, 1] — TODO confirm
        sig_real = data[0]
        plt.plot(sig_real, color='black')
        plt.gca().set_axis_off()
        # Strip every margin so the curve fills the whole canvas.
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf1 = io.BytesIO()
        fig1.savefig(buf1, format="png", dpi=dpi)
        buf1.seek(0)
        # Decode the in-memory PNG via OpenCV, then collapse to grayscale.
        img_arr1 = np.frombuffer(buf1.getvalue(), dtype=np.uint8)
        buf1.close()
        img1 = cv2.imdecode(img_arr1, 1)
        img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Same rendering for the imaginary component.
        fig2 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        sig_imag = data[1]
        plt.plot(sig_imag, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig2.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        img_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        img = cv2.imdecode(img_arr, 1)
        img2 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Stack as a 2-channel float image: channel 0 = real, channel 1 = imag.
        img = np.asarray([img1, img2], dtype=np.float32)
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the ResNet-18 named in the mlconfig file, adapt it to
    2-channel input and `num_classes` outputs, load weights, eval mode.

    Returns the model, or None on failure (the error is printed).
    """
    try:
        config = mlconfig.load(file_config)
        # config.model.architecture is a dotted path like "pkg.mod.ClassName".
        module_path, cls_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), cls_name)()
        # 2->3 channel adapter in front of the stock first conv.
        adapter = torch.nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                  padding=(3, 3), bias=False)
        model.conv1 = torch.nn.Sequential(adapter, model.conv1)
        model.fc = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of `model` on the prepared sample `data`.

    Returns [predicted_class_name, softmax_probability]; None on failure.
    `shablon` only tags the log lines.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # BUG FIX: move logits to the CPU before any numpy work —
        # np.argmax/np.asarray raise on CUDA tensors (the sibling variant of
        # this function does the same .cpu() hop before its numpy ops).
        output = output.cpu()
        label = np.asarray(np.argmax(output, axis=1))[0]
        output = np.asarray(torch.squeeze(output, 0))
        # Numerically-stable softmax probability of the winning class.
        expon = np.exp(output - np.max(output))
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Dump the two input planes (data[0]=real, data[1]=imag) as grayscale
    PNGs named after the inference index, prediction and model identity.

    Returns None; errors are printed and swallowed.
    """
    try:
        for plane_idx, tag in ((0, '_real_'), (1, '_imag_')):
            fig, ax = plt.subplots()
            ax.imshow(data[plane_idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + tag + str(model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,132 @@
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src ='', ind_inference=0):
    """Render data[0] (real part) and data[1] (imag part) as axis-free
    grayscale line plots and stack them into a (2, H, W) float32 array.

    `src` and `ind_inference` are unused here; kept for hook-interface parity.
    Returns the stacked array, or None on failure (the error is printed).
    """
    try:
        figsize = (16, 8)
        dpi = 80  # 80 dpi * (16, 8) inches -> 1280x640 px canvas
        fig1 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))  # assumes signal normalized to [-1, 1] — TODO confirm
        sig_real = data[0]
        plt.plot(sig_real, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf1 = io.BytesIO()
        fig1.savefig(buf1, format="png", dpi=dpi)
        buf1.seek(0)
        # Decode the in-memory PNG via OpenCV, then collapse to grayscale.
        img_arr1 = np.frombuffer(buf1.getvalue(), dtype=np.uint8)
        buf1.close()
        img1 = cv2.imdecode(img_arr1, 1)
        img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Same rendering for the imaginary component.
        fig2 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        sig_imag = data[1]
        plt.plot(sig_imag, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig2.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        img_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        img = cv2.imdecode(img_arr, 1)
        img2 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Channel 0 = real plot, channel 1 = imag plot.
        img = np.asarray([img1, img2], dtype=np.float32)
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the configured ResNet-18, adapt it to 2-channel input and
    `num_classes` outputs, load weights and switch to eval mode.

    Returns the model, or None on failure (the error is printed).
    """
    try:
        config = mlconfig.load(file_config)
        module_path, cls_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), cls_name)()
        # 2->3 channel adapter in front of the stock first conv.
        adapter = torch.nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                  padding=(3, 3), bias=False)
        model.conv1 = torch.nn.Sequential(adapter, model.conv1)
        model.fc = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of `model` on the prepared sample `data`.

    Returns [predicted_class_name, softmax_probability]; None on failure.
    `shablon` only tags the log lines.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # BUG FIX: move logits to the CPU before any numpy work —
        # np.argmax/np.asarray raise on CUDA tensors (the sibling variant of
        # this function does the same .cpu() hop before its numpy ops).
        output = output.cpu()
        label = np.asarray(np.argmax(output, axis=1))[0]
        output = np.asarray(torch.squeeze(output, 0))
        # Numerically-stable softmax probability of the winning class.
        expon = np.exp(output - np.max(output))
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Dump the two input planes (data[0]=real, data[1]=imag) as grayscale
    PNGs named after the inference index, prediction and model identity.

    Returns None; errors are printed and swallowed.
    """
    try:
        for plane_idx, tag in ((0, '_real_'), (1, '_imag_')):
            fig, ax = plt.subplots()
            ax.imshow(data[plane_idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + tag + str(model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,132 @@
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src ='', ind_inference=0):
    """Render data[0] (real part) and data[1] (imag part) as axis-free
    grayscale line plots and stack them into a (2, H, W) float32 array.

    `src` and `ind_inference` are unused here; kept for hook-interface parity.
    Returns the stacked array, or None on failure (the error is printed).
    """
    try:
        figsize = (16, 8)
        dpi = 80  # 80 dpi * (16, 8) inches -> 1280x640 px canvas
        fig1 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))  # assumes signal normalized to [-1, 1] — TODO confirm
        sig_real = data[0]
        plt.plot(sig_real, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf1 = io.BytesIO()
        fig1.savefig(buf1, format="png", dpi=dpi)
        buf1.seek(0)
        # Decode the in-memory PNG via OpenCV, then collapse to grayscale.
        img_arr1 = np.frombuffer(buf1.getvalue(), dtype=np.uint8)
        buf1.close()
        img1 = cv2.imdecode(img_arr1, 1)
        img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Same rendering for the imaginary component.
        fig2 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        sig_imag = data[1]
        plt.plot(sig_imag, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig2.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        img_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        img = cv2.imdecode(img_arr, 1)
        img2 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Channel 0 = real plot, channel 1 = imag plot.
        img = np.asarray([img1, img2], dtype=np.float32)
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the configured ResNet-18, adapt it to 2-channel input and
    `num_classes` outputs, load weights and switch to eval mode.

    Returns the model, or None on failure (the error is printed).
    """
    try:
        config = mlconfig.load(file_config)
        module_path, cls_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), cls_name)()
        # 2->3 channel adapter in front of the stock first conv.
        adapter = torch.nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                  padding=(3, 3), bias=False)
        model.conv1 = torch.nn.Sequential(adapter, model.conv1)
        model.fc = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of `model` on the prepared sample `data`.

    Returns [predicted_class_name, softmax_probability]; None on failure.
    `shablon` only tags the log lines.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # BUG FIX: move logits to the CPU before any numpy work —
        # np.argmax/np.asarray raise on CUDA tensors (the sibling variant of
        # this function does the same .cpu() hop before its numpy ops).
        output = output.cpu()
        label = np.asarray(np.argmax(output, axis=1))[0]
        output = np.asarray(torch.squeeze(output, 0))
        # Numerically-stable softmax probability of the winning class.
        expon = np.exp(output - np.max(output))
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Dump the two input planes (data[0]=real, data[1]=imag) as grayscale
    PNGs named after the inference index, prediction and model identity.

    Returns None; errors are printed and swallowed.
    """
    try:
        for plane_idx, tag in ((0, '_real_'), (1, '_imag_')):
            fig, ax = plt.subplots()
            ax.imshow(data[plane_idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + tag + str(model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,134 @@
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src ='', ind_inference=0):
    """Render data[0] (real part) and data[1] (imag part) as axis-free
    grayscale line plots and stack them into a (2, H, W) float32 array.

    `src` and `ind_inference` are unused here; kept for hook-interface parity.
    Returns the stacked array, or None on failure (the error is printed).
    """
    try:
        figsize = (16, 8)
        dpi = 80  # 80 dpi * (16, 8) inches -> 1280x640 px canvas
        fig1 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))  # assumes signal normalized to [-1, 1] — TODO confirm
        sig_real = data[0]
        plt.plot(sig_real, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf1 = io.BytesIO()
        fig1.savefig(buf1, format="png", dpi=dpi)
        buf1.seek(0)
        # Decode the in-memory PNG via OpenCV, then collapse to grayscale.
        img_arr1 = np.frombuffer(buf1.getvalue(), dtype=np.uint8)
        buf1.close()
        img1 = cv2.imdecode(img_arr1, 1)
        img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Same rendering for the imaginary component.
        fig2 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        sig_imag = data[1]
        plt.plot(sig_imag, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig2.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        img_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        img = cv2.imdecode(img_arr, 1)
        img2 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Channel 0 = real plot, channel 1 = imag plot.
        img = np.asarray([img1, img2], dtype=np.float32)
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the configured ResNet-18 with a 2-channel input adapter and
    a 3-layer MLP head (512 -> 128 -> 32 -> num_classes), load weights, eval.

    Returns the model, or None on failure (the error is printed).
    """
    try:
        config = mlconfig.load(file_config)
        module_path, cls_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), cls_name)()
        # 2->3 channel adapter in front of the stock first conv.
        adapter = torch.nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                  padding=(3, 3), bias=False)
        model.conv1 = torch.nn.Sequential(adapter, model.conv1)
        head = [nn.Linear(in_features=512, out_features=128, bias=True),
                nn.Linear(in_features=128, out_features=32, bias=True),
                nn.Linear(in_features=32, out_features=num_classes, bias=True)]
        model.fc = nn.Sequential(*head)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of `model` on the prepared sample `data`.

    Returns [predicted_class_name, softmax_probability]; None on failure.
    `shablon` only tags the log lines.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # BUG FIX: move logits to the CPU before any numpy work —
        # np.argmax/np.asarray raise on CUDA tensors (the sibling variant of
        # this function does the same .cpu() hop before its numpy ops).
        output = output.cpu()
        label = np.asarray(np.argmax(output, axis=1))[0]
        output = np.asarray(torch.squeeze(output, 0))
        # Numerically-stable softmax probability of the winning class.
        expon = np.exp(output - np.max(output))
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Dump the two input planes (data[0]=real, data[1]=imag) as grayscale
    PNGs named after the inference index, prediction and model identity.

    Returns None; errors are printed and swallowed.
    """
    try:
        for plane_idx, tag in ((0, '_real_'), (1, '_imag_')):
            fig, ax = plt.subplots()
            ax.imshow(data[plane_idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + tag + str(model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,134 @@
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src ='', ind_inference=0):
    """Render data[0] (real part) and data[1] (imag part) as axis-free
    grayscale line plots and stack them into a (2, H, W) float32 array.

    `src` and `ind_inference` are unused here; kept for hook-interface parity.
    Returns the stacked array, or None on failure (the error is printed).
    """
    try:
        figsize = (16, 8)
        dpi = 80  # 80 dpi * (16, 8) inches -> 1280x640 px canvas
        fig1 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))  # assumes signal normalized to [-1, 1] — TODO confirm
        sig_real = data[0]
        plt.plot(sig_real, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf1 = io.BytesIO()
        fig1.savefig(buf1, format="png", dpi=dpi)
        buf1.seek(0)
        # Decode the in-memory PNG via OpenCV, then collapse to grayscale.
        img_arr1 = np.frombuffer(buf1.getvalue(), dtype=np.uint8)
        buf1.close()
        img1 = cv2.imdecode(img_arr1, 1)
        img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Same rendering for the imaginary component.
        fig2 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        sig_imag = data[1]
        plt.plot(sig_imag, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig2.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        img_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        img = cv2.imdecode(img_arr, 1)
        img2 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Channel 0 = real plot, channel 1 = imag plot.
        img = np.asarray([img1, img2], dtype=np.float32)
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the configured ResNet-18 with a 3-layer MLP head
    (512 -> 128 -> 32 -> num_classes), load weights and switch to eval mode.

    NOTE: unlike the sibling variants, the 2->3 channel first-conv adapter is
    commented out here — the model keeps its stock conv1.
    Returns the model, or None on failure (the error is printed).
    """
    try:
        config = mlconfig.load(file_config)
        module_path, cls_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), cls_name)()
        # model.conv1 = torch.nn.Sequential(torch.nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
        #                                                   padding=(3, 3), bias=False), model.conv1)
        head = [nn.Linear(in_features=512, out_features=128, bias=True),
                nn.Linear(in_features=128, out_features=32, bias=True),
                nn.Linear(in_features=32, out_features=num_classes, bias=True)]
        model.fc = nn.Sequential(*head)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of `model` on the prepared sample `data`.

    Returns [predicted_class_name, softmax_probability]; None on failure.
    `shablon` only tags the log lines.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # BUG FIX: move logits to the CPU before any numpy work —
        # np.argmax/np.asarray raise on CUDA tensors (the sibling variant of
        # this function does the same .cpu() hop before its numpy ops).
        output = output.cpu()
        label = np.asarray(np.argmax(output, axis=1))[0]
        output = np.asarray(torch.squeeze(output, 0))
        # Numerically-stable softmax probability of the winning class.
        expon = np.exp(output - np.max(output))
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Dump the two input planes (data[0]=real, data[1]=imag) as grayscale
    PNGs named after the inference index, prediction and model identity.

    Returns None; errors are printed and swallowed.
    """
    try:
        for plane_idx, tag in ((0, '_real_'), (1, '_imag_')):
            fig, ax = plt.subplots()
            ax.imshow(data[plane_idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + tag + str(model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,136 @@
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet50(data=None, src ='', ind_inference=0):
    """Render data[0] (real part) and data[1] (imag part) as axis-free
    grayscale line plots and stack them into a (2, H, W) float32 array.

    `src` and `ind_inference` are unused here; kept for hook-interface parity.
    Returns the stacked array, or None on failure (the error is printed).
    """
    try:
        figsize = (16, 8)
        dpi = 80  # 80 dpi * (16, 8) inches -> 1280x640 px canvas
        fig1 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))  # assumes signal normalized to [-1, 1] — TODO confirm
        sig_real = data[0]
        plt.plot(sig_real, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf1 = io.BytesIO()
        fig1.savefig(buf1, format="png", dpi=dpi)
        buf1.seek(0)
        # Decode the in-memory PNG via OpenCV, then collapse to grayscale.
        img_arr1 = np.frombuffer(buf1.getvalue(), dtype=np.uint8)
        buf1.close()
        img1 = cv2.imdecode(img_arr1, 1)
        img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Same rendering for the imaginary component.
        fig2 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        sig_imag = data[1]
        plt.plot(sig_imag, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig2.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        img_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        img = cv2.imdecode(img_arr, 1)
        img2 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Channel 0 = real plot, channel 1 = imag plot.
        img = np.asarray([img1, img2], dtype=np.float32)
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet50(file_model='', file_config='', num_classes=None):
    """Instantiate the configured ResNet-50 with a 2-channel input adapter and
    a 3-layer MLP head (2048 -> 512 -> 128 -> num_classes), load weights, eval.

    Returns the model, or None on failure (the error is printed).
    """
    try:
        config = mlconfig.load(file_config)
        module_path, cls_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), cls_name)()
        # 2->3 channel adapter in front of the stock first conv.
        adapter = torch.nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                  padding=(3, 3), bias=False)
        model.conv1 = torch.nn.Sequential(adapter, model.conv1)
        head = [nn.Linear(in_features=2048, out_features=512, bias=True),
                nn.Linear(in_features=512, out_features=128, bias=True),
                nn.Linear(in_features=128, out_features=num_classes, bias=True)]
        model.fc = nn.Sequential(*head)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet50(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of `model` on the prepared sample `data`.

    Returns [predicted_class_name, softmax_probability]; None on failure.
    `shablon` only tags the log lines.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # BUG FIX: move logits to the CPU before any numpy work —
        # np.argmax/np.asarray raise on CUDA tensors (the resnet18 sibling of
        # this function does the same .cpu() hop before its numpy ops).
        output = output.cpu()
        label = np.asarray(np.argmax(output, axis=1))[0]
        output = np.asarray(torch.squeeze(output, 0))
        # Numerically-stable softmax probability of the winning class.
        expon = np.exp(output - np.max(output))
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet50(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Dump the two input planes (data[0]=real, data[1]=imag) as grayscale
    PNGs named after the inference index, prediction and model identity.

    Returns None; errors are printed and swallowed.
    """
    try:
        for plane_idx, tag in ((0, '_real_'), (1, '_imag_')):
            fig, ax = plt.subplots()
            ax.imshow(data[plane_idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + tag + str(model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,135 @@
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet50(data=None, src ='', ind_inference=0):
    """Render data[0] (real part) and data[1] (imag part) as axis-free
    grayscale line plots and stack them into a (2, H, W) float32 array.

    `src` and `ind_inference` are unused here; kept for hook-interface parity.
    Returns the stacked array, or None on failure (the error is printed).
    """
    try:
        figsize = (16, 8)
        dpi = 80  # 80 dpi * (16, 8) inches -> 1280x640 px canvas
        fig1 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))  # assumes signal normalized to [-1, 1] — TODO confirm
        sig_real = data[0]
        plt.plot(sig_real, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf1 = io.BytesIO()
        fig1.savefig(buf1, format="png", dpi=dpi)
        buf1.seek(0)
        # Decode the in-memory PNG via OpenCV, then collapse to grayscale.
        img_arr1 = np.frombuffer(buf1.getvalue(), dtype=np.uint8)
        buf1.close()
        img1 = cv2.imdecode(img_arr1, 1)
        img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Same rendering for the imaginary component.
        fig2 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        sig_imag = data[1]
        plt.plot(sig_imag, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig2.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        img_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        img = cv2.imdecode(img_arr, 1)
        img2 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        # Channel 0 = real plot, channel 1 = imag plot.
        img = np.asarray([img1, img2], dtype=np.float32)
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet50(file_model='', file_config='', num_classes=None):
    """Instantiate the configured ResNet-50 with a 2-channel input adapter and
    a 4-layer MLP head (2048 -> 512 -> 128 -> 32 -> num_classes), load
    weights and switch to eval mode.

    Returns the model, or None on failure (the error is printed).
    """
    try:
        config = mlconfig.load(file_config)
        module_path, cls_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), cls_name)()
        # 2->3 channel adapter in front of the stock first conv.
        adapter = torch.nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                  padding=(3, 3), bias=False)
        model.conv1 = torch.nn.Sequential(adapter, model.conv1)
        head = [nn.Linear(in_features=2048, out_features=512, bias=True),
                nn.Linear(in_features=512, out_features=128, bias=True),
                nn.Linear(in_features=128, out_features=32, bias=True),
                nn.Linear(in_features=32, out_features=num_classes, bias=True)]
        model.fc = nn.Sequential(*head)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet50(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of `model` on the prepared sample `data`.

    Returns [predicted_class_name, softmax_probability]; None on failure.
    `shablon` only tags the log lines.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # BUG FIX: move logits to the CPU before any numpy work —
        # np.argmax/np.asarray raise on CUDA tensors (the resnet18 sibling of
        # this function does the same .cpu() hop before its numpy ops).
        output = output.cpu()
        label = np.asarray(np.argmax(output, axis=1))[0]
        output = np.asarray(torch.squeeze(output, 0))
        # Numerically-stable softmax probability of the winning class.
        expon = np.exp(output - np.max(output))
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet50(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Dump the two input planes (data[0]=real, data[1]=imag) as grayscale
    PNGs named after the inference index, prediction and model identity.

    Returns None; errors are printed and swallowed.
    """
    try:
        for plane_idx, tag in ((0, '_real_'), (1, '_imag_')):
            fig, ax = plt.subplots()
            ax.imshow(data[plane_idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + tag + str(model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,146 @@
import sklearn
from sklearn.ensemble import BaggingClassifier
import numpy as np
import torch
from torch.utils._contextlib import F
from torch.utils.data import TensorDataset, DataLoader
import torch.nn as nn
from torch.optim import Adam
class SimpleCNN(nn.Module):
    """Three-stage CNN for 1x8x8 inputs producing a 10-class softmax vector.

    Each stage is a stride-1 conv followed by a stride-2 "pooling" conv, so
    spatial size goes 8 -> 4 -> 2 -> 1 while channels go 1 -> 3 -> 6 -> 10,
    leaving exactly 10 flattened features for the final linear layer.
    """
    def __init__(self):
        super(SimpleCNN, self).__init__()
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=3, kernel_size=5, stride=1, padding=2)
        self.conv1_s = nn.Conv2d(in_channels=3, out_channels=3, kernel_size=5, stride=2, padding=2)
        self.conv2 = nn.Conv2d(in_channels=3, out_channels=6, kernel_size=3, stride=1, padding=1)
        self.conv2_s = nn.Conv2d(in_channels=6, out_channels=6, kernel_size=3, stride=2, padding=1)
        self.conv3 = nn.Conv2d(in_channels=6, out_channels=10, kernel_size=3, stride=1, padding=1)
        self.conv3_s = nn.Conv2d(in_channels=10, out_channels=10, kernel_size=3, stride=2, padding=1)
        self.flatten = nn.Flatten()
        self.fc1 = nn.Linear(10, 10)
    def forward(self, x):
        # BUG FIX: the file imports `F` from torch.utils._contextlib, which is
        # a TypeVar, not torch.nn.functional — every F.relu call would crash.
        # Import the real functional module locally so the interface stays put.
        import torch.nn.functional as F
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv1_s(x))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv2_s(x))
        x = F.relu(self.conv3(x))
        x = F.relu(self.conv3_s(x))
        x = self.flatten(x)
        x = self.fc1(x)
        # Explicit dim: softmax over the class axis (implicit dim is deprecated).
        x = F.softmax(x, dim=1)
        return x
class PytorchModel(sklearn.base.BaseEstimator):
    """sklearn-compatible wrapper around a PyTorch classifier so it can be
    used inside sklearn meta-estimators (e.g. BaggingClassifier).

    fit() trains until the train-accuracy spread over the last `tol_epochs`
    epochs drops to `accuracy_tol` or below.
    """
    def __init__(self, net_type, net_params, optim_type, optim_params, loss_fn,
                 input_shape, batch_size=32, accuracy_tol=0.02, tol_epochs=10,
                 cuda=True):
        # Filled in by fit().
        self.classes_ = None
        self.optim = None
        self.net = None
        self.net_type = net_type
        self.net_params = net_params
        self.optim_type = optim_type
        self.optim_params = optim_params
        self.loss_fn = loss_fn
        self.input_shape = input_shape
        self.batch_size = batch_size
        self.accuracy_tol = accuracy_tol
        self.tol_epochs = tol_epochs
        self.cuda = cuda
    def fit(self, X, y):
        """Train a fresh network on (X, y); X is reshaped to `input_shape`."""
        self.net = self.net_type(**self.net_params)
        if self.cuda:
            self.net = self.net.cuda()
        self.optim = self.optim_type(self.net.parameters(), **self.optim_params)
        uniq_classes = np.sort(np.unique(y))
        self.classes_ = uniq_classes
        X = X.reshape(-1, *self.input_shape)
        x_tensor = torch.tensor(X.astype(np.float32))
        # BUG FIX: np.long was removed in NumPy 1.24; int64 is also what
        # CrossEntropyLoss expects for targets.
        y_tensor = torch.tensor(y.astype(np.int64))
        train_dataset = TensorDataset(x_tensor, y_tensor)
        train_loader = DataLoader(train_dataset, batch_size=self.batch_size,
                                  shuffle=True, drop_last=False)
        last_accuracies = []
        keep_training = True
        while keep_training:
            self.net.train()
            train_samples_count = 0
            true_train_samples_count = 0
            for batch in train_loader:
                x_data, y_data = batch[0], batch[1]
                if self.cuda:
                    x_data = x_data.cuda()
                    y_data = y_data.cuda()
                y_pred = self.net(x_data)
                loss = self.loss_fn(y_pred, y_data)
                self.optim.zero_grad()
                loss.backward()
                self.optim.step()
                y_pred = y_pred.argmax(dim=1, keepdim=False)
                true_classified = (y_pred == y_data).sum().item()
                true_train_samples_count += true_classified
                train_samples_count += len(x_data)
            # Epoch-level accuracy drives the plateau-based stopping rule.
            train_accuracy = true_train_samples_count / train_samples_count
            last_accuracies.append(train_accuracy)
            if len(last_accuracies) > self.tol_epochs:
                last_accuracies.pop(0)
            if len(last_accuracies) == self.tol_epochs:
                accuracy_difference = max(last_accuracies) - min(last_accuracies)
                if accuracy_difference <= self.accuracy_tol:
                    keep_training = False
    def predict_proba(self, X, y=None):
        """Return raw network outputs for X as an (n_samples, n_classes) array."""
        X = X.reshape(-1, *self.input_shape)
        x_tensor = torch.tensor(X.astype(np.float32))
        # BUG FIX: `if y:` raises "truth value is ambiguous" for numpy arrays;
        # test for presence explicitly. The labels are only dataset padding here.
        if y is not None:
            y_tensor = torch.tensor(y.astype(np.float32))
        else:
            y_tensor = torch.zeros(len(X), dtype=torch.long)
        test_dataset = TensorDataset(x_tensor, y_tensor)
        test_loader = DataLoader(test_dataset, batch_size=self.batch_size,
                                 shuffle=False, drop_last=False)
        self.net.eval()
        predictions = []
        for batch in test_loader:
            x_data, y_data = batch[0], batch[1]
            if self.cuda:
                x_data = x_data.cuda()
                y_data = y_data.cuda()
            y_pred = self.net(x_data)
            predictions.append(y_pred.detach().cpu().numpy())
        predictions = np.concatenate(predictions)
        return predictions
    def predict(self, x, y=None):
        """Return hard class indices (argmax over predict_proba)."""
        predictions = self.predict_proba(x, y)
        predictions = predictions.argmax(axis=1)
        return predictions
# Bagging-ready base estimator: SimpleCNN trained with Adam + cross-entropy
# on 1x8x8 inputs, stopping when train accuracy plateaus.
base_model = PytorchModel(
    net_type=SimpleCNN,
    net_params=dict(),
    optim_type=Adam,
    optim_params={"lr": 1e-3},
    loss_fn=nn.CrossEntropyLoss(),
    input_shape=(1, 8, 8),
    batch_size=32,
    accuracy_tol=0.02,
    tol_epochs=10,
    cuda=True,
)

@ -0,0 +1,154 @@
import torchsig.transforms.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src ='', ind_inference=0):
    """Build a 3-plane (real plot, magnitude plot, spectrogram) 256x256
    float image stack from the complex signal data[0] + j*data[1].

    `src` and `ind_inference` are unused here; kept for hook-interface parity.
    Returns a (3, 256, 256) array, or None on failure (the error is printed).
    """
    try:
        figsize = (16, 16)
        dpi = 64  # 64 dpi * 16 inches -> 1024 px square canvas
        # Recombine the two real planes into one complex signal.
        signal = np.vectorize(complex)(data[0], data[1])
        spec = transform.Spectrogram(nperseg=1024)
        # Crop the spectrogram width to the plot canvas width (figsize[0]*dpi).
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        mag = np.abs(signal)
        real = signal.real
        # Plot of the real component, axis-free, full-canvas.
        fig2 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))  # assumes signal normalized to [-1, 1] — TODO confirm
        plt.plot(real, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf2 = io.BytesIO()
        fig2.savefig(buf2, format="png", dpi=dpi)
        buf2.seek(0)
        img_arr2 = np.frombuffer(buf2.getvalue(), dtype=np.uint8)
        buf2.close()
        img2 = cv2.imdecode(img_arr2, 1)
        img2 = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig2)
        # Plot of the magnitude, same rendering path.
        fig3 = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        plt.plot(mag, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf3 = io.BytesIO()
        fig3.savefig(buf3, format="png", dpi=dpi)
        buf3.seek(0)
        img_arr3 = np.frombuffer(buf3.getvalue(), dtype=np.uint8)
        buf3.close()
        img3 = cv2.imdecode(img_arr3, 1)
        img3 = cv2.cvtColor(img3, cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig3)
        # Bring all three planes to a common CNN input size.
        resize = (256,256)
        resized_real = cv2.resize(img2, resize)
        resized_mag = cv2.resize(img3, resize)
        resized_spectr = cv2.resize(spectr, resize)
        img = np.array([resized_real, resized_mag, resized_spectr])
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the network described by *file_config* and load weights
    from *file_model*.

    The config's model.architecture entry is a dotted path
    "package.module.ClassName"; the class is imported and instantiated with
    no arguments.  The stem is wrapped so the network accepts 2-channel
    input (a 2->3 channel 7x7 conv is prepended to the original conv1) and
    the head is replaced by a *num_classes*-way linear layer.

    Returns the model in eval mode (moved to GPU when available), or None
    on error (the error is printed, not raised).
    """
    try:
        config = mlconfig.load(file_config)
        # Split the dotted path once instead of calling rsplit twice.
        module_path, class_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), class_name)()
        # Adapt the 3-channel ImageNet stem to 2-channel input; use the
        # file's `nn` alias consistently instead of mixing in `torch.nn.*`.
        model.conv1 = nn.Sequential(nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                              padding=(3, 3), bias=False), model.conv1)
        model.fc = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass and map the arg-max class through *mapping*.

    data: array-like accepted by torch.tensor; a batch dimension is added.
    model: callable returning class scores for the batch.
    mapping: class index -> human-readable label.
    shablon: suffix spliced into the log lines.

    Returns [prediction, probability] where probability is the soft-maxed
    score of the predicted class rounded to 2 decimals, or None on error.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Move scores to host memory before any numpy work: the original
        # called np.argmax/np.asarray on a (possibly CUDA) tensor, which
        # fails when running on GPU.
        scores = np.asarray(torch.squeeze(output.cpu(), 0))
        label = int(np.argmax(scores))  # same index torch.max produced above
        expon = np.exp(scores - np.max(scores))  # numerically stable softmax
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save each prepared channel (real / imag / spec) as a grayscale PNG.

    Output files are named
    "<src>_inference_<ind>_<prediction>_<channel>_<model_id>_<model_type>.png".
    Returns None; errors are printed and swallowed.
    """
    try:
        # One identical save per channel — the original repeated this
        # figure/savefig/cleanup sequence three times verbatim.
        for idx, suffix in enumerate(('real', 'imag', 'spec')):
            fig, ax = plt.subplots()
            ax.imshow(data[idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + '_' + suffix + '_' + str(
                model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,154 @@
import torchsig.torchsig.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def _render_gray_curve(series, figsize, dpi):
    """Plot *series* as a black line on a borderless, fixed-range axis and
    return the render as a 2-D grayscale uint8 image (via an in-memory PNG)."""
    fig = plt.figure(figsize=figsize)
    plt.axes(ylim=(-1, 1))  # fixed y-range so successive renders are comparable
    plt.plot(series, color='black')
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
    plt.margins(0, 0)
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=dpi)
    buf.seek(0)
    arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
    buf.close()
    img = cv2.cvtColor(cv2.imdecode(arr, 1), cv2.COLOR_BGR2GRAY)
    plt.clf()
    plt.cla()
    plt.close()
    plt.close(fig)
    return img


def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build the 3-channel image stack fed to the ResNet18 classifier.

    data is expected to be a pair of equal-length sequences: data[0] the
    real (I) part and data[1] the imaginary (Q) part of the signal — TODO
    confirm with callers.  src and ind_inference are accepted for interface
    compatibility but are not used here.

    Returns a (3, 256, 256) array [real plot, magnitude plot, spectrogram],
    or None if preparation fails (the error is printed, not raised).
    """
    try:
        figsize = (16, 16)
        dpi = 64
        # Broadcasting replaces the per-element np.vectorize(complex) loop;
        # float64 inputs keep the complex128 dtype the original produced.
        signal = np.asarray(data[0], dtype=np.float64) + 1j * np.asarray(data[1], dtype=np.float64)
        spec = transform.Spectrogram(nperseg=1024)
        # Crop the spectrogram width to the rendered plots' pixel width.
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        img_real = _render_gray_curve(signal.real, figsize, dpi)
        img_mag = _render_gray_curve(np.abs(signal), figsize, dpi)
        resize = (256, 256)
        img = np.array([cv2.resize(img_real, resize),
                        cv2.resize(img_mag, resize),
                        cv2.resize(spectr, resize)])
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the network described by *file_config* and load weights
    from *file_model*.

    The config's model.architecture entry is a dotted path
    "package.module.ClassName"; the class is imported and instantiated with
    no arguments.  The stem is wrapped so the network accepts 2-channel
    input (a 2->3 channel 7x7 conv is prepended to the original conv1) and
    the head is replaced by a *num_classes*-way linear layer.

    Returns the model in eval mode (moved to GPU when available), or None
    on error (the error is printed, not raised).
    """
    try:
        config = mlconfig.load(file_config)
        # Split the dotted path once instead of calling rsplit twice.
        module_path, class_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), class_name)()
        # Adapt the 3-channel ImageNet stem to 2-channel input; use the
        # file's `nn` alias consistently instead of mixing in `torch.nn.*`.
        model.conv1 = nn.Sequential(nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                              padding=(3, 3), bias=False), model.conv1)
        model.fc = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass and map the arg-max class through *mapping*.

    data: array-like accepted by torch.tensor; a batch dimension is added.
    model: callable returning class scores for the batch.
    mapping: class index -> human-readable label.
    shablon: suffix spliced into the log lines.

    Returns [prediction, probability] where probability is the soft-maxed
    score of the predicted class rounded to 2 decimals, or None on error.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Move scores to host memory before any numpy work: the original
        # called np.argmax/np.asarray on a (possibly CUDA) tensor, which
        # fails when running on GPU.
        scores = np.asarray(torch.squeeze(output.cpu(), 0))
        label = int(np.argmax(scores))  # same index torch.max produced above
        expon = np.exp(scores - np.max(scores))  # numerically stable softmax
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save each prepared channel (real / imag / spec) as a grayscale PNG.

    Output files are named
    "<src>_inference_<ind>_<prediction>_<channel>_<model_id>_<model_type>.png".
    Returns None; errors are printed and swallowed.
    """
    try:
        # One identical save per channel — the original repeated this
        # figure/savefig/cleanup sequence three times verbatim.
        for idx, suffix in enumerate(('real', 'imag', 'spec')):
            fig, ax = plt.subplots()
            ax.imshow(data[idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + '_' + suffix + '_' + str(
                model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,154 @@
import torchsig.transforms.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def _render_gray_curve(series, figsize, dpi):
    """Plot *series* as a black line on a borderless, fixed-range axis and
    return the render as a 2-D grayscale uint8 image (via an in-memory PNG)."""
    fig = plt.figure(figsize=figsize)
    plt.axes(ylim=(-1, 1))  # fixed y-range so successive renders are comparable
    plt.plot(series, color='black')
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
    plt.margins(0, 0)
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=dpi)
    buf.seek(0)
    arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
    buf.close()
    img = cv2.cvtColor(cv2.imdecode(arr, 1), cv2.COLOR_BGR2GRAY)
    plt.clf()
    plt.cla()
    plt.close()
    plt.close(fig)
    return img


def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build the 3-channel image stack fed to the ResNet18 classifier.

    data is expected to be a pair of equal-length sequences: data[0] the
    real (I) part and data[1] the imaginary (Q) part of the signal — TODO
    confirm with callers.  src and ind_inference are accepted for interface
    compatibility but are not used here.

    Returns a (3, 256, 256) array [real plot, magnitude plot, spectrogram],
    or None if preparation fails (the error is printed, not raised).
    """
    try:
        figsize = (16, 16)
        dpi = 64
        # Broadcasting replaces the per-element np.vectorize(complex) loop;
        # float64 inputs keep the complex128 dtype the original produced.
        signal = np.asarray(data[0], dtype=np.float64) + 1j * np.asarray(data[1], dtype=np.float64)
        spec = transform.Spectrogram(nperseg=1024)
        # Crop the spectrogram width to the rendered plots' pixel width.
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        img_real = _render_gray_curve(signal.real, figsize, dpi)
        img_mag = _render_gray_curve(np.abs(signal), figsize, dpi)
        resize = (256, 256)
        img = np.array([cv2.resize(img_real, resize),
                        cv2.resize(img_mag, resize),
                        cv2.resize(spectr, resize)])
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the network described by *file_config* and load weights
    from *file_model*.

    The config's model.architecture entry is a dotted path
    "package.module.ClassName"; the class is imported and instantiated with
    no arguments.  The stem is wrapped so the network accepts 2-channel
    input (a 2->3 channel 7x7 conv is prepended to the original conv1) and
    the head is replaced by a *num_classes*-way linear layer.

    Returns the model in eval mode (moved to GPU when available), or None
    on error (the error is printed, not raised).
    """
    try:
        config = mlconfig.load(file_config)
        # Split the dotted path once instead of calling rsplit twice.
        module_path, class_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), class_name)()
        # Adapt the 3-channel ImageNet stem to 2-channel input; use the
        # file's `nn` alias consistently instead of mixing in `torch.nn.*`.
        model.conv1 = nn.Sequential(nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                              padding=(3, 3), bias=False), model.conv1)
        model.fc = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass and map the arg-max class through *mapping*.

    data: array-like accepted by torch.tensor; a batch dimension is added.
    model: callable returning class scores for the batch.
    mapping: class index -> human-readable label.
    shablon: suffix spliced into the log lines.

    Returns [prediction, probability] where probability is the soft-maxed
    score of the predicted class rounded to 2 decimals, or None on error.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Move scores to host memory before any numpy work: the original
        # called np.argmax/np.asarray on a (possibly CUDA) tensor, which
        # fails when running on GPU.
        scores = np.asarray(torch.squeeze(output.cpu(), 0))
        label = int(np.argmax(scores))  # same index torch.max produced above
        expon = np.exp(scores - np.max(scores))  # numerically stable softmax
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save each prepared channel (real / imag / spec) as a grayscale PNG.

    Output files are named
    "<src>_inference_<ind>_<prediction>_<channel>_<model_id>_<model_type>.png".
    Returns None; errors are printed and swallowed.
    """
    try:
        # One identical save per channel — the original repeated this
        # figure/savefig/cleanup sequence three times verbatim.
        for idx, suffix in enumerate(('real', 'imag', 'spec')):
            fig, ax = plt.subplots()
            ax.imshow(data[idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + '_' + suffix + '_' + str(
                model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,151 @@
import torchsig.transforms.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def _render_gray_curve(series, figsize, dpi):
    """Plot *series* as a black line on a borderless, fixed-range axis and
    return the render as a 2-D grayscale uint8 image (via an in-memory PNG)."""
    fig = plt.figure(figsize=figsize)
    plt.axes(ylim=(-1, 1))  # fixed y-range so successive renders are comparable
    plt.plot(series, color='black')
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
    plt.margins(0, 0)
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=dpi)
    buf.seek(0)
    arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
    buf.close()
    img = cv2.cvtColor(cv2.imdecode(arr, 1), cv2.COLOR_BGR2GRAY)
    plt.clf()
    plt.cla()
    plt.close()
    plt.close(fig)
    return img


def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build the 3-channel image stack fed to the ResNet18 classifier
    (unresized variant: channels keep their native figsize*dpi resolution).

    data is expected to be a pair of equal-length sequences: data[0] the
    real (I) part and data[1] the imaginary (Q) part of the signal — TODO
    confirm with callers.  src and ind_inference are accepted for interface
    compatibility but are not used here.

    Returns np.array([real plot, magnitude plot, spectrogram]) or None on
    failure (the error is printed, not raised).
    """
    try:
        figsize = (16, 16)
        dpi = 64
        # Broadcasting replaces the per-element np.vectorize(complex) loop;
        # float64 inputs keep the complex128 dtype the original produced.
        signal = np.asarray(data[0], dtype=np.float64) + 1j * np.asarray(data[1], dtype=np.float64)
        spec = transform.Spectrogram(nperseg=1024)
        # Crop the spectrogram width to the rendered plots' pixel width.
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        img_real = _render_gray_curve(signal.real, figsize, dpi)
        img_mag = _render_gray_curve(np.abs(signal), figsize, dpi)
        # NOTE(review): assumes the spectrogram and the two plots share the
        # same height/width; otherwise np.array builds a ragged array — verify.
        img = np.array([img_real, img_mag, spectr])
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Instantiate the network described by *file_config* and load weights
    from *file_model*.

    The config's model.architecture entry is a dotted path
    "package.module.ClassName"; the class is imported and instantiated with
    no arguments.  The stem is wrapped so the network accepts 2-channel
    input (a 2->3 channel 7x7 conv is prepended to the original conv1) and
    the head is replaced by a *num_classes*-way linear layer.

    Returns the model in eval mode (moved to GPU when available), or None
    on error (the error is printed, not raised).
    """
    try:
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Split the dotted path once instead of calling rsplit twice.
        module_path, class_name = config.model.architecture.rsplit('.', maxsplit=1)
        model = getattr(import_module(module_path), class_name)()
        # Adapt the 3-channel ImageNet stem to 2-channel input; use the
        # file's `nn` alias consistently instead of mixing in `torch.nn.*`.
        model.conv1 = nn.Sequential(nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                                              padding=(3, 3), bias=False), model.conv1)
        # (dropped the leftover debug `print(model)` that dumped the whole
        # architecture to stdout on every build)
        model.fc = nn.Linear(in_features=512, out_features=num_classes, bias=True)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass and map the arg-max class through *mapping*.

    data: array-like accepted by torch.tensor; a batch dimension is added.
    model: callable returning class scores for the batch.
    mapping: class index -> human-readable label.
    shablon: suffix spliced into the log lines.

    Returns [prediction, probability] where probability is the soft-maxed
    score of the predicted class rounded to 2 decimals, or None on error.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img)
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Move scores to host memory before any numpy work: the original
        # called np.argmax/np.asarray on a (possibly CUDA) tensor, which
        # fails when running on GPU.
        scores = np.asarray(torch.squeeze(output.cpu(), 0))
        label = int(np.argmax(scores))  # same index torch.max produced above
        expon = np.exp(scores - np.max(scores))  # numerically stable softmax
        probability = round((expon / expon.sum())[label], 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save each prepared channel (real / imag / spec) as a grayscale PNG.

    Output files are named
    "<src>_inference_<ind>_<prediction>_<channel>_<model_id>_<model_type>.png".
    Returns None; errors are printed and swallowed.
    """
    try:
        # One identical save per channel — the original repeated this
        # figure/savefig/cleanup sequence three times verbatim.
        for idx, suffix in enumerate(('real', 'imag', 'spec')):
            fig, ax = plt.subplots()
            ax.imshow(data[idx], cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + '_' + suffix + '_' + str(
                model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,252 @@
import torchsig.transforms.dataset_transforms as transform
import torchsig.transforms.functional as F
from importlib import import_module
from torchvision import models
import torch.nn as nn
import matplotlib
import numpy as np
import torch
import cv2
import gc
import io
def _render_gray_curve_ensemble(series, figsize, dpi):
    """Plot *series* as a black line on a borderless, fixed-range axis and
    return the render as a 2-D grayscale uint8 image (via an in-memory PNG)."""
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=figsize)
    plt.axes(ylim=(-1, 1))  # fixed y-range so successive renders are comparable
    plt.plot(series, color='black')
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
    plt.margins(0, 0)
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=dpi)
    buf.seek(0)
    arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
    buf.close()
    img = cv2.cvtColor(cv2.imdecode(arr, 1), cv2.COLOR_BGR2GRAY)
    plt.clf()
    plt.cla()
    plt.close()
    plt.close(fig)
    return img


def pre_func_ensemble(data=None, src='', ind_inference=0):
    """Build the 3-channel image stack fed to the ensemble classifier.

    data is expected to be a pair (real part, imaginary part) of the
    signal — TODO confirm with callers.  src and ind_inference are kept
    for interface compatibility only.

    Returns np.array([real plot, imag plot, spectrogram crop]) with the
    spectrogram cropped to the plots' pixel width, or None on failure.
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')  # headless backend: we only rasterize to buffers
        plt.ioff()
        figsize = (16, 16)
        dpi = 16
        # Broadcasting replaces the per-element np.vectorize(complex) loop;
        # float64 inputs keep the complex128 dtype the original produced.
        # (Dropped the leftover debug prints 0/'a'/'b'/1/2/3, the duplicate
        # sigr/sigi computations, the no-op cv2.destroyAllWindows calls —
        # no HighGUI window is ever opened — and the pointless `del`s.)
        signal = np.asarray(data[0], dtype=np.float64) + 1j * np.asarray(data[1], dtype=np.float64)
        spectr = np.asarray(F.spectrogram(signal, fft_size=256, fft_stride=256), dtype=np.float32)
        img_real = _render_gray_curve_ensemble(signal.real, figsize, dpi)
        img_imag = _render_gray_curve_ensemble(signal.imag, figsize, dpi)
        # Crop the spectrogram width to the plots' pixel width (figsize[0]*dpi).
        img = np.array([img_real, img_imag, spectr[:, :figsize[0] * dpi]])
        gc.collect()
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_ensemble(file_model='', file_config='', num_classes=None):
    """Build the ResNet18/50/101 ensemble and load weights from *file_model*.

    file_config is accepted for interface compatibility but unused here.
    NOTE(review): the num_classes argument is overridden to 2 below —
    presumably the shipped checkpoint is a binary classifier; confirm
    before passing anything else.

    Returns the ensemble in eval mode (moved to GPU when available), or
    None on error (the error is printed, not raised).
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')
        plt.ioff()
        torch.cuda.empty_cache()
        model1 = models.resnet18(pretrained=False)
        model2 = models.resnet50(pretrained=False)
        model3 = models.resnet101(pretrained=False)
        num_classes = 2  # hard-coded: the checkpoint expects binary heads
        model1.fc = nn.Linear(model1.fc.in_features, num_classes)
        model2.fc = nn.Linear(model2.fc.in_features, num_classes)
        model3.fc = nn.Linear(model3.fc.in_features, num_classes)

        class Ensemble(nn.Module):
            """Concatenates the three backbones' logits and mixes them
            with a single linear layer.  Attribute names model1..3/fc must
            match the checkpoint's state_dict keys."""

            def __init__(self, m1, m2, m3):
                super().__init__()
                self.model1 = m1
                self.model2 = m2
                self.model3 = m3
                self.fc = nn.Linear(3 * num_classes, num_classes)

            def forward(self, x):
                stacked = torch.cat((self.model1(x), self.model2(x), self.model3(x)), dim=1)
                return self.fc(stacked)

        model = Ensemble(model1, model2, model3)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        # (Dropped the original's `del model1..3` — they remain referenced
        # by the ensemble, so nothing was actually freed — and the no-op
        # cv2.destroyAllWindows call.)
        gc.collect()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_ensemble(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of the ensemble and map the arg-max class.

    Same contract as the resnet18 inference helper: data is array-like,
    model is a callable returning class scores, mapping maps class index
    to label, shablon is a log-line suffix.

    Returns [prediction, probability] (probability = soft-maxed score of
    the predicted class, rounded to 2 decimals), or None on error.
    """
    try:
        # (Dropped the no-op cv2.destroyAllWindows calls — no HighGUI
        # window is ever opened — and the pointless local `del`s.)
        gc.collect()
        torch.cuda.empty_cache()
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data).cpu(), 0).to(device)
        with torch.no_grad():
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        scores = np.asarray(torch.squeeze(output.cpu(), 0))
        label = int(np.argmax(scores))  # same index torch.max found above
        expon = np.exp(scores - np.max(scores))  # numerically stable softmax
        probability = round((expon / expon.sum())[label], 2)
        gc.collect()
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_ensemble(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the prepared channels (real / mod / spec) as grayscale PNGs.

    Only the first inferences (ind_inference <= 100) are dumped, bounding
    disk usage.  File name pattern:
    "<src>_inference_<ind>_<prediction>_<channel>_<model_id>_<model_type>.png".
    Returns None; errors are printed and swallowed.
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')
        plt.ioff()
        if int(ind_inference) <= 100:
            # One identical save per channel — the original repeated this
            # block three times verbatim (no-op cv2.destroyAllWindows and
            # `del fig/ax` dropped: no HighGUI window is ever opened and
            # the figures are already closed).
            for idx, suffix in enumerate(('real', 'mod', 'spec')):
                fig, ax = plt.subplots()
                ax.imshow(data[idx], cmap='gray')
                plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + '_' + suffix + '_' + str(
                    model_id) + '_' + model_type + '.png')
                plt.clf()
                plt.cla()
                plt.close(fig)
                gc.collect()
        plt.clf()
        plt.cla()
        plt.close()
        gc.collect()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,252 @@
import torchsig.transforms.dataset_transforms as transform
import torchsig.transforms.functional as F
from importlib import import_module
from torchvision import models
import torch.nn as nn
import matplotlib
import numpy as np
import torch
import cv2
import gc
import io
def _render_gray_curve_ensemble(series, figsize, dpi):
    """Plot *series* as a black line on a borderless, fixed-range axis and
    return the render as a 2-D grayscale uint8 image (via an in-memory PNG)."""
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=figsize)
    plt.axes(ylim=(-1, 1))  # fixed y-range so successive renders are comparable
    plt.plot(series, color='black')
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
    plt.margins(0, 0)
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=dpi)
    buf.seek(0)
    arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
    buf.close()
    img = cv2.cvtColor(cv2.imdecode(arr, 1), cv2.COLOR_BGR2GRAY)
    plt.clf()
    plt.cla()
    plt.close()
    plt.close(fig)
    return img


def pre_func_ensemble(data=None, src='', ind_inference=0):
    """Build the 3-channel image stack fed to the ensemble classifier.

    data is expected to be a pair (real part, imaginary part) of the
    signal — TODO confirm with callers.  src and ind_inference are kept
    for interface compatibility only.

    Returns np.array([real plot, imag plot, spectrogram crop]) with the
    spectrogram cropped to the plots' pixel width, or None on failure.
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')  # headless backend: we only rasterize to buffers
        plt.ioff()
        figsize = (16, 16)
        dpi = 16
        # Broadcasting replaces the per-element np.vectorize(complex) loop;
        # float64 inputs keep the complex128 dtype the original produced.
        # (Dropped the leftover debug prints 0/'a'/'b'/1/2/3, the duplicate
        # sigr/sigi computations, the no-op cv2.destroyAllWindows calls —
        # no HighGUI window is ever opened — and the pointless `del`s.)
        signal = np.asarray(data[0], dtype=np.float64) + 1j * np.asarray(data[1], dtype=np.float64)
        spectr = np.asarray(F.spectrogram(signal, fft_size=256, fft_stride=256), dtype=np.float32)
        img_real = _render_gray_curve_ensemble(signal.real, figsize, dpi)
        img_imag = _render_gray_curve_ensemble(signal.imag, figsize, dpi)
        # Crop the spectrogram width to the plots' pixel width (figsize[0]*dpi).
        img = np.array([img_real, img_imag, spectr[:, :figsize[0] * dpi]])
        gc.collect()
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_ensemble(file_model='', file_config='', num_classes=None):
    """Build the ResNet18/50/101 ensemble and load weights from *file_model*.

    file_config is accepted for interface compatibility but unused here.
    NOTE(review): the num_classes argument is overridden to 2 below —
    presumably the shipped checkpoint is a binary classifier; confirm
    before passing anything else.

    Returns the ensemble in eval mode (moved to GPU when available), or
    None on error (the error is printed, not raised).
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')
        plt.ioff()
        torch.cuda.empty_cache()
        model1 = models.resnet18(pretrained=False)
        model2 = models.resnet50(pretrained=False)
        model3 = models.resnet101(pretrained=False)
        num_classes = 2  # hard-coded: the checkpoint expects binary heads
        model1.fc = nn.Linear(model1.fc.in_features, num_classes)
        model2.fc = nn.Linear(model2.fc.in_features, num_classes)
        model3.fc = nn.Linear(model3.fc.in_features, num_classes)

        class Ensemble(nn.Module):
            """Concatenates the three backbones' logits and mixes them
            with a single linear layer.  Attribute names model1..3/fc must
            match the checkpoint's state_dict keys."""

            def __init__(self, m1, m2, m3):
                super().__init__()
                self.model1 = m1
                self.model2 = m2
                self.model3 = m3
                self.fc = nn.Linear(3 * num_classes, num_classes)

            def forward(self, x):
                stacked = torch.cat((self.model1(x), self.model2(x), self.model3(x)), dim=1)
                return self.fc(stacked)

        model = Ensemble(model1, model2, model3)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        # (Dropped the original's `del model1..3` — they remain referenced
        # by the ensemble, so nothing was actually freed — and the no-op
        # cv2.destroyAllWindows call.)
        gc.collect()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_ensemble(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of the ensemble and map the arg-max class.

    Same contract as the resnet18 inference helper: data is array-like,
    model is a callable returning class scores, mapping maps class index
    to label, shablon is a log-line suffix.

    Returns [prediction, probability] (probability = soft-maxed score of
    the predicted class, rounded to 2 decimals), or None on error.
    """
    try:
        # (Dropped the no-op cv2.destroyAllWindows calls — no HighGUI
        # window is ever opened — and the pointless local `del`s.)
        gc.collect()
        torch.cuda.empty_cache()
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data).cpu(), 0).to(device)
        with torch.no_grad():
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        scores = np.asarray(torch.squeeze(output.cpu(), 0))
        label = int(np.argmax(scores))  # same index torch.max found above
        expon = np.exp(scores - np.max(scores))  # numerically stable softmax
        probability = round((expon / expon.sum())[label], 2)
        gc.collect()
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_ensemble(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the prepared channels (real / mod / spec) as grayscale PNGs.

    Only the first inferences (ind_inference <= 100) are dumped, bounding
    disk usage.  File name pattern:
    "<src>_inference_<ind>_<prediction>_<channel>_<model_id>_<model_type>.png".
    Returns None; errors are printed and swallowed.
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')
        plt.ioff()
        if int(ind_inference) <= 100:
            # One identical save per channel — the original repeated this
            # block three times verbatim (no-op cv2.destroyAllWindows and
            # `del fig/ax` dropped: no HighGUI window is ever opened and
            # the figures are already closed).
            for idx, suffix in enumerate(('real', 'mod', 'spec')):
                fig, ax = plt.subplots()
                ax.imshow(data[idx], cmap='gray')
                plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + '_' + suffix + '_' + str(
                    model_id) + '_' + model_type + '.png')
                plt.clf()
                plt.cla()
                plt.close(fig)
                gc.collect()
        plt.clf()
        plt.cla()
        plt.close()
        gc.collect()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,252 @@
import torchsig.transforms.dataset_transforms as transform
import torchsig.transforms.functional as F
from importlib import import_module
from torchvision import models
import torch.nn as nn
import matplotlib
import numpy as np
import torch
import cv2
import gc
import io
def _render_gray_curve_ensemble(series, figsize, dpi):
    """Plot *series* as a black line on a borderless, fixed-range axis and
    return the render as a 2-D grayscale uint8 image (via an in-memory PNG)."""
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=figsize)
    plt.axes(ylim=(-1, 1))  # fixed y-range so successive renders are comparable
    plt.plot(series, color='black')
    plt.gca().set_axis_off()
    plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
    plt.margins(0, 0)
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=dpi)
    buf.seek(0)
    arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
    buf.close()
    img = cv2.cvtColor(cv2.imdecode(arr, 1), cv2.COLOR_BGR2GRAY)
    plt.clf()
    plt.cla()
    plt.close()
    plt.close(fig)
    return img


def pre_func_ensemble(data=None, src='', ind_inference=0):
    """Build the 3-channel image stack fed to the ensemble classifier.

    data is expected to be a pair (real part, imaginary part) of the
    signal — TODO confirm with callers.  src and ind_inference are kept
    for interface compatibility only.

    Returns np.array([real plot, imag plot, spectrogram crop]) with the
    spectrogram cropped to the plots' pixel width, or None on failure.
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')  # headless backend: we only rasterize to buffers
        plt.ioff()
        figsize = (16, 16)
        dpi = 16
        # Broadcasting replaces the per-element np.vectorize(complex) loop;
        # float64 inputs keep the complex128 dtype the original produced.
        # (Dropped the leftover debug prints 0/'a'/'b'/1/2/3, the duplicate
        # sigr/sigi computations, the no-op cv2.destroyAllWindows calls —
        # no HighGUI window is ever opened — and the pointless `del`s.)
        signal = np.asarray(data[0], dtype=np.float64) + 1j * np.asarray(data[1], dtype=np.float64)
        spectr = np.asarray(F.spectrogram(signal, fft_size=256, fft_stride=256), dtype=np.float32)
        img_real = _render_gray_curve_ensemble(signal.real, figsize, dpi)
        img_imag = _render_gray_curve_ensemble(signal.imag, figsize, dpi)
        # Crop the spectrogram width to the plots' pixel width (figsize[0]*dpi).
        img = np.array([img_real, img_imag, spectr[:, :figsize[0] * dpi]])
        gc.collect()
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_ensemble(file_model='', file_config='', num_classes=None):
    """Build the ResNet18/50/101 ensemble and load weights from *file_model*.

    file_config is accepted for interface compatibility but unused here.
    NOTE(review): the num_classes argument is overridden to 2 below —
    presumably the shipped checkpoint is a binary classifier; confirm
    before passing anything else.

    Returns the ensemble in eval mode (moved to GPU when available), or
    None on error (the error is printed, not raised).
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')
        plt.ioff()
        torch.cuda.empty_cache()
        model1 = models.resnet18(pretrained=False)
        model2 = models.resnet50(pretrained=False)
        model3 = models.resnet101(pretrained=False)
        num_classes = 2  # hard-coded: the checkpoint expects binary heads
        model1.fc = nn.Linear(model1.fc.in_features, num_classes)
        model2.fc = nn.Linear(model2.fc.in_features, num_classes)
        model3.fc = nn.Linear(model3.fc.in_features, num_classes)

        class Ensemble(nn.Module):
            """Concatenates the three backbones' logits and mixes them
            with a single linear layer.  Attribute names model1..3/fc must
            match the checkpoint's state_dict keys."""

            def __init__(self, m1, m2, m3):
                super().__init__()
                self.model1 = m1
                self.model2 = m2
                self.model3 = m3
                self.fc = nn.Linear(3 * num_classes, num_classes)

            def forward(self, x):
                stacked = torch.cat((self.model1(x), self.model2(x), self.model3(x)), dim=1)
                return self.fc(stacked)

        model = Ensemble(model1, model2, model3)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        # (Dropped the original's `del model1..3` — they remain referenced
        # by the ensemble, so nothing was actually freed — and the no-op
        # cv2.destroyAllWindows call.)
        gc.collect()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_ensemble(data=None, model=None, mapping=None, shablon=''):
    """Run one forward pass of the ensemble and map the arg-max class.

    Same contract as the resnet18 inference helper: data is array-like,
    model is a callable returning class scores, mapping maps class index
    to label, shablon is a log-line suffix.

    Returns [prediction, probability] (probability = soft-maxed score of
    the predicted class, rounded to 2 decimals), or None on error.
    """
    try:
        # (Dropped the no-op cv2.destroyAllWindows calls — no HighGUI
        # window is ever opened — and the pointless local `del`s.)
        gc.collect()
        torch.cuda.empty_cache()
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data).cpu(), 0).to(device)
        with torch.no_grad():
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        scores = np.asarray(torch.squeeze(output.cpu(), 0))
        label = int(np.argmax(scores))  # same index torch.max found above
        expon = np.exp(scores - np.max(scores))  # numerically stable softmax
        probability = round((expon / expon.sum())[label], 2)
        gc.collect()
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_ensemble(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the prepared channels (real / mod / spec) as grayscale PNGs.

    Only the first inferences (ind_inference <= 100) are dumped, bounding
    disk usage.  File name pattern:
    "<src>_inference_<ind>_<prediction>_<channel>_<model_id>_<model_type>.png".
    Returns None; errors are printed and swallowed.
    """
    try:
        import matplotlib.pyplot as plt
        matplotlib.use('Agg')
        plt.ioff()
        if int(ind_inference) <= 100:
            # One identical save per channel — the original repeated this
            # block three times verbatim (no-op cv2.destroyAllWindows and
            # `del fig/ax` dropped: no HighGUI window is ever opened and
            # the figures are already closed).
            for idx, suffix in enumerate(('real', 'mod', 'spec')):
                fig, ax = plt.subplots()
                ax.imshow(data[idx], cmap='gray')
                plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + '_' + suffix + '_' + str(
                    model_id) + '_' + model_type + '.png')
                plt.clf()
                plt.cla()
                plt.close(fig)
                gc.collect()
        plt.clf()
        plt.cla()
        plt.close()
        gc.collect()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,163 @@
import torchsig.transforms.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build a 3-channel image (real waveform, magnitude waveform, spectrogram)
    from an I/Q sample pair for ResNet-18 inference.

    Args:
        data: sequence of two equal-length arrays: in-phase (real) and
            quadrature (imaginary) signal components.
        src, ind_inference: unused here; kept for interface compatibility
            with the other pre-processing variants.

    Returns:
        np.ndarray of shape (3, 256, 256) on success, otherwise None.
    """
    def _curve_to_gray(samples, figsize, dpi):
        # Render a 1-D curve to an off-screen figure and decode it back as a
        # grayscale image array.
        fig = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        plt.plot(samples, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        raw = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        gray = cv2.cvtColor(cv2.imdecode(raw, 1), cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig)
        return gray

    try:
        figsize = (16, 16)
        dpi = 64
        # Direct complex arithmetic is vectorized in C; np.vectorize(complex)
        # loops element-by-element in Python.
        signal = np.asarray(data[0]) + 1j * np.asarray(data[1])
        spec = transform.Spectrogram(nperseg=1024)
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        real_img = _curve_to_gray(signal.real, figsize, dpi)
        mag_img = _curve_to_gray(np.abs(signal), figsize, dpi)
        resize = (256, 256)
        img = np.array([
            cv2.resize(real_img, resize),
            cv2.resize(mag_img, resize),
            cv2.resize(spectr, resize),
        ])
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Construct a modified ResNet-18 classifier and load trained weights.

    Args:
        file_model: path to the saved state_dict.
        file_config: path to an mlconfig file whose `model.architecture` is a
            dotted path to the model constructor.
        num_classes: number of output classes; None keeps the historical
            hard-coded 5, so existing callers are unaffected.

    Returns:
        The model in eval mode on the available device, or None on failure.
    """
    try:
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Resolve "pkg.module.ClassName" into the constructor and call it.
        module_path, _, attr = config.model.architecture.rpartition('.')
        model = getattr(import_module(module_path), attr)()
        # NOTE(review): this stem expects 2-channel input, while the paired
        # pre-processing builds 3 channels — confirm which is intended.
        model.conv1 = nn.Sequential(
            nn.Conv2d(2, 3, kernel_size=(7, 7), stride=(2, 2),
                      padding=(3, 3), bias=False),
            model.conv1)
        out_features = 5 if num_classes is None else int(num_classes)
        model.fc = nn.Sequential(
            nn.Linear(in_features=512, out_features=128, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=128, out_features=32, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=32, out_features=out_features, bias=True)
        )
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Classify one prepared sample and report the softmax confidence.

    Args:
        data: array-like model input (without the batch dimension).
        model: torch module returning class logits.
        mapping: dict mapping class index -> class-name string.
        shablon: suffix inserted into the log messages.

    Returns:
        [prediction, probability] or None on failure.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            # .float() guards against double/int inputs that would not match
            # the model's float32 weights.
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Bring logits to host memory first: np.asarray on a CUDA tensor raises.
        output = output.detach().cpu()
        label = int(torch.argmax(output, dim=1)[0])
        logits = np.asarray(torch.squeeze(output, 0))
        # Numerically stable softmax confidence for the winning class.
        expon = np.exp(logits - np.max(logits))
        probability = round(float((expon / expon.sum())[label]), 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the three input channels (real, imag, spectrogram) as grayscale PNGs.

    Files are named
    '<src>_inference_<ind_inference>_<prediction>_<tag>_<model_id>_<model_type>.png'
    with tag in ('real', 'imag', 'spec'). Returns None (also on failure).
    """
    def _save_channel(channel, tag):
        # One figure per channel, closed immediately to avoid figure leaks.
        fig, ax = plt.subplots()
        ax.imshow(channel, cmap='gray')
        plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction +
                    '_' + tag + '_' + str(model_id) + '_' + model_type + '.png')
        plt.clf()
        plt.cla()
        plt.close()

    try:
        for idx, tag in enumerate(('real', 'imag', 'spec')):
            _save_channel(data[idx], tag)
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,163 @@
import torchsig.transforms.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build a 3-channel image (real waveform, magnitude waveform, spectrogram)
    from an I/Q sample pair for ResNet-18 inference.

    Args:
        data: sequence of two equal-length arrays: in-phase (real) and
            quadrature (imaginary) signal components.
        src, ind_inference: unused here; kept for interface compatibility
            with the other pre-processing variants.

    Returns:
        np.ndarray of shape (3, 256, 256) on success, otherwise None.
    """
    def _curve_to_gray(samples, figsize, dpi):
        # Render a 1-D curve to an off-screen figure and decode it back as a
        # grayscale image array.
        fig = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        plt.plot(samples, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        raw = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        gray = cv2.cvtColor(cv2.imdecode(raw, 1), cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig)
        return gray

    try:
        figsize = (16, 16)
        dpi = 64
        # Direct complex arithmetic is vectorized in C; np.vectorize(complex)
        # loops element-by-element in Python.
        signal = np.asarray(data[0]) + 1j * np.asarray(data[1])
        spec = transform.Spectrogram(nperseg=1024)
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        real_img = _curve_to_gray(signal.real, figsize, dpi)
        mag_img = _curve_to_gray(np.abs(signal), figsize, dpi)
        resize = (256, 256)
        img = np.array([
            cv2.resize(real_img, resize),
            cv2.resize(mag_img, resize),
            cv2.resize(spectr, resize),
        ])
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Construct a modified ResNet-18 classifier and load trained weights.

    Args:
        file_model: path to the saved state_dict.
        file_config: path to an mlconfig file whose `model.architecture` is a
            dotted path to the model constructor.
        num_classes: number of output classes; None keeps the historical
            hard-coded 5, so existing callers are unaffected.

    Returns:
        The model in eval mode on the available device, or None on failure.
    """
    try:
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Resolve "pkg.module.ClassName" into the constructor and call it.
        module_path, _, attr = config.model.architecture.rpartition('.')
        model = getattr(import_module(module_path), attr)()
        # Prepend a 3->3 conv in front of the original ResNet stem.
        model.conv1 = nn.Sequential(
            nn.Conv2d(3, 3, kernel_size=(7, 7), stride=(2, 2),
                      padding=(3, 3), bias=False),
            model.conv1)
        out_features = 5 if num_classes is None else int(num_classes)
        model.fc = nn.Sequential(
            nn.Linear(in_features=512, out_features=128, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=128, out_features=32, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=32, out_features=out_features, bias=True)
        )
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Classify one prepared sample and report the softmax confidence.

    Args:
        data: array-like model input (without the batch dimension).
        model: torch module returning class logits.
        mapping: dict mapping class index -> class-name string.
        shablon: suffix inserted into the log messages.

    Returns:
        [prediction, probability] or None on failure.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            # .float() guards against double/int inputs that would not match
            # the model's float32 weights.
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Bring logits to host memory first: np.asarray on a CUDA tensor raises.
        output = output.detach().cpu()
        label = int(torch.argmax(output, dim=1)[0])
        logits = np.asarray(torch.squeeze(output, 0))
        # Numerically stable softmax confidence for the winning class.
        expon = np.exp(logits - np.max(logits))
        probability = round(float((expon / expon.sum())[label]), 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the three input channels (real, imag, spectrogram) as grayscale PNGs.

    Files are named
    '<src>_inference_<ind_inference>_<prediction>_<tag>_<model_id>_<model_type>.png'
    with tag in ('real', 'imag', 'spec'). Returns None (also on failure).
    """
    def _save_channel(channel, tag):
        # One figure per channel, closed immediately to avoid figure leaks.
        fig, ax = plt.subplots()
        ax.imshow(channel, cmap='gray')
        plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction +
                    '_' + tag + '_' + str(model_id) + '_' + model_type + '.png')
        plt.clf()
        plt.cla()
        plt.close()

    try:
        for idx, tag in enumerate(('real', 'imag', 'spec')):
            _save_channel(data[idx], tag)
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,208 @@
import torchsig.transforms.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import matplotlib
import numpy as np
import mlconfig
import torch
import cv2
import gc
import io
def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build a 3-channel input (real waveform, magnitude waveform, spectrogram)
    from an I/Q sample pair for ResNet-18 inference.

    Args:
        data: sequence of two equal-length arrays: in-phase (real) and
            quadrature (imaginary) signal components.
        src, ind_inference: unused here; kept for interface compatibility
            with the other pre-processing variants.

    Returns:
        np.ndarray with channels [real 256x256, magnitude 256x256,
        spectrogram at native size] on success, otherwise None.
    """
    def _curve_to_gray(samples, figsize, dpi):
        # Render a 1-D curve to an off-screen figure and decode it back as a
        # grayscale image array.
        fig = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        plt.plot(samples, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        raw = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        gray = cv2.cvtColor(cv2.imdecode(raw, 1), cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig)
        return gray

    try:
        matplotlib.use('Agg')  # headless rendering
        plt.ioff()
        figsize = (8, 8)
        dpi = 32
        # Direct complex arithmetic is vectorized in C; np.vectorize(complex)
        # loops element-by-element in Python.
        signal = np.asarray(data[0]) + 1j * np.asarray(data[1])
        spec = transform.Spectrogram(nperseg=256)
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        resize = (256, 256)
        resized_real = cv2.resize(_curve_to_gray(signal.real, figsize, dpi), resize)
        resized_mag = cv2.resize(_curve_to_gray(np.abs(signal), figsize, dpi), resize)
        # The spectrogram is intentionally NOT resized (historical behaviour).
        img = np.array([resized_real, resized_mag, spectr])
        cv2.destroyAllWindows()
        # Release large intermediates before returning.
        del signal, spectr, resized_real, resized_mag
        gc.collect()
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Construct a modified ResNet-18 classifier and load trained weights.

    Args:
        file_model: path to the saved state_dict.
        file_config: path to an mlconfig file whose `model.architecture` is a
            dotted path to the model constructor.
        num_classes: number of output classes; None keeps the historical
            hard-coded 5, so existing callers are unaffected.

    Returns:
        The model in eval mode on the available device, or None on failure.
    """
    try:
        matplotlib.use('Agg')  # headless backend for any later plotting
        plt.ioff()
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Resolve "pkg.module.ClassName" into the constructor and call it.
        module_path, _, attr = config.model.architecture.rpartition('.')
        model = getattr(import_module(module_path), attr)()
        # Prepend a 3->3 conv in front of the original ResNet stem.
        model.conv1 = nn.Sequential(
            nn.Conv2d(3, 3, kernel_size=(7, 7), stride=(2, 2),
                      padding=(3, 3), bias=False),
            model.conv1)
        out_features = 5 if num_classes is None else int(num_classes)
        model.fc = nn.Sequential(
            nn.Linear(in_features=512, out_features=128, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=128, out_features=32, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=32, out_features=out_features, bias=True)
        )
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        cv2.destroyAllWindows()
        gc.collect()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Classify one prepared sample and report the softmax confidence.

    Args:
        data: array-like model input (without the batch dimension).
        model: torch module returning class logits.
        mapping: dict mapping class index -> class-name string.
        shablon: suffix inserted into the log messages.

    Returns:
        [prediction, probability] or None on failure.
    """
    try:
        cv2.destroyAllWindows()
        gc.collect()
        torch.cuda.empty_cache()
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Bring logits to host memory first: np.asarray on a CUDA tensor raises.
        output = output.detach().cpu()
        label = int(torch.argmax(output, dim=1)[0])
        logits = np.asarray(torch.squeeze(output, 0))
        # Numerically stable softmax confidence for the winning class.
        expon = np.exp(logits - np.max(logits))
        probability = round(float((expon / expon.sum())[label]), 2)
        del logits, expon, output
        cv2.destroyAllWindows()
        gc.collect()
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the three input channels (real, imag, spectrogram) as grayscale PNGs.

    Files are named
    '<src>_inference_<ind_inference>_<prediction>_<tag>_<model_id>_<model_type>.png'
    with tag in ('real', 'imag', 'spec'). Returns None (also on failure).
    """
    def _save_channel(channel, tag):
        # One figure per channel, closed immediately to avoid figure leaks.
        fig, ax = plt.subplots()
        ax.imshow(channel, cmap='gray')
        plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction +
                    '_' + tag + '_' + str(model_id) + '_' + model_type + '.png')
        plt.clf()
        plt.cla()
        plt.close()

    try:
        matplotlib.use('Agg')  # headless backend
        plt.ioff()
        for idx, tag in enumerate(('real', 'imag', 'spec')):
            _save_channel(data[idx], tag)
        cv2.destroyAllWindows()
        gc.collect()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,163 @@
import torchsig.transforms.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import numpy as np
import mlconfig
import torch
import cv2
import io
def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build a 3-channel image (real waveform, magnitude waveform, spectrogram)
    from an I/Q sample pair for ResNet-18 inference.

    Args:
        data: sequence of two equal-length arrays: in-phase (real) and
            quadrature (imaginary) signal components.
        src, ind_inference: unused here; kept for interface compatibility
            with the other pre-processing variants.

    Returns:
        np.ndarray of shape (3, 256, 256) on success, otherwise None.
    """
    def _curve_to_gray(samples, figsize, dpi):
        # Render a 1-D curve to an off-screen figure and decode it back as a
        # grayscale image array.
        fig = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        plt.plot(samples, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        raw = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        gray = cv2.cvtColor(cv2.imdecode(raw, 1), cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig)
        return gray

    try:
        figsize = (16, 16)
        dpi = 64
        # Direct complex arithmetic is vectorized in C; np.vectorize(complex)
        # loops element-by-element in Python.
        signal = np.asarray(data[0]) + 1j * np.asarray(data[1])
        spec = transform.Spectrogram(nperseg=1024)
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        real_img = _curve_to_gray(signal.real, figsize, dpi)
        mag_img = _curve_to_gray(np.abs(signal), figsize, dpi)
        resize = (256, 256)
        img = np.array([
            cv2.resize(real_img, resize),
            cv2.resize(mag_img, resize),
            cv2.resize(spectr, resize),
        ])
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Construct a modified ResNet-18 classifier and load trained weights.

    Args:
        file_model: path to the saved state_dict.
        file_config: path to an mlconfig file whose `model.architecture` is a
            dotted path to the model constructor.
        num_classes: number of output classes; None keeps the historical
            hard-coded 5, so existing callers are unaffected.

    Returns:
        The model in eval mode on the available device, or None on failure.
    """
    try:
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Resolve "pkg.module.ClassName" into the constructor and call it.
        module_path, _, attr = config.model.architecture.rpartition('.')
        model = getattr(import_module(module_path), attr)()
        # Prepend a 3->3 conv in front of the original ResNet stem.
        model.conv1 = nn.Sequential(
            nn.Conv2d(3, 3, kernel_size=(7, 7), stride=(2, 2),
                      padding=(3, 3), bias=False),
            model.conv1)
        out_features = 5 if num_classes is None else int(num_classes)
        model.fc = nn.Sequential(
            nn.Linear(in_features=512, out_features=128, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=128, out_features=32, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=32, out_features=out_features, bias=True)
        )
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Classify one prepared sample and report the softmax confidence.

    Args:
        data: array-like model input (without the batch dimension).
        model: torch module returning class logits.
        mapping: dict mapping class index -> class-name string.
        shablon: suffix inserted into the log messages.

    Returns:
        [prediction, probability] or None on failure.
    """
    try:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Bring logits to host memory first: np.asarray on a CUDA tensor raises.
        output = output.detach().cpu()
        label = int(torch.argmax(output, dim=1)[0])
        logits = np.asarray(torch.squeeze(output, 0))
        # Numerically stable softmax confidence for the winning class.
        expon = np.exp(logits - np.max(logits))
        probability = round(float((expon / expon.sum())[label]), 2)
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the three input channels (real, imag, spectrogram) as grayscale PNGs.

    Files are named
    '<src>_inference_<ind_inference>_<prediction>_<tag>_<model_id>_<model_type>.png'
    with tag in ('real', 'imag', 'spec'). Returns None (also on failure).
    """
    def _save_channel(channel, tag):
        # One figure per channel, closed immediately to avoid figure leaks.
        fig, ax = plt.subplots()
        ax.imshow(channel, cmap='gray')
        plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction +
                    '_' + tag + '_' + str(model_id) + '_' + model_type + '.png')
        plt.clf()
        plt.cla()
        plt.close()

    try:
        for idx, tag in enumerate(('real', 'imag', 'spec')):
            _save_channel(data[idx], tag)
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,216 @@
import torchsig.transforms.transforms as transform
import torchsig.transforms.functional as F
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import matplotlib
import numpy as np
import mlconfig
import torch
import cv2
import gc
import io
def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build a 3-channel image (real waveform, imaginary waveform, spectrogram)
    from an I/Q sample pair for ResNet-18 inference.

    Args:
        data: sequence of two equal-length arrays: in-phase (real) and
            quadrature (imaginary) signal components.
        src, ind_inference: unused here; kept for interface compatibility
            with the other pre-processing variants.

    Returns:
        np.ndarray of shape (3, H, 256) on success, otherwise None.
    """
    def _curve_to_gray(samples, figsize, dpi):
        # Render a 1-D curve to an off-screen figure and decode it back as a
        # grayscale image array.
        fig = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        plt.plot(samples, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        raw = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        gray = cv2.cvtColor(cv2.imdecode(raw, 1), cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig)
        return gray

    try:
        import matplotlib.pyplot as plt  # local import kept from the original
        matplotlib.use('Agg')  # headless rendering
        plt.ioff()
        figsize = (16, 16)
        dpi = 16
        # Direct complex arithmetic is vectorized in C; np.vectorize(complex)
        # loops element-by-element in Python.
        signal = np.asarray(data[0]) + 1j * np.asarray(data[1])
        # float32 keeps the spectrogram dtype consistent with the image channels.
        spectr = np.asarray(F.spectrogram(signal, fft_size=256, fft_stride=256), dtype=np.float32)
        img1 = _curve_to_gray(signal.real, figsize, dpi)
        img2 = _curve_to_gray(signal.imag, figsize, dpi)
        # Trim the spectrogram width to the rendered image width.
        img = np.array([img1, img2, spectr[:, :figsize[0] * dpi]])
        cv2.destroyAllWindows()
        # Release large intermediates before returning.
        del signal, spectr, img1, img2
        gc.collect()
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Construct a modified ResNet-18 classifier and load trained weights.

    Args:
        file_model: path to the saved state_dict.
        file_config: path to an mlconfig file whose `model.architecture` is a
            dotted path to the model constructor.
        num_classes: number of output classes; None keeps the historical
            hard-coded 3, so existing callers are unaffected.

    Returns:
        The model in eval mode on the available device, or None on failure.
    """
    try:
        matplotlib.use('Agg')  # headless backend for any later plotting
        plt.ioff()
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Resolve "pkg.module.ClassName" into the constructor and call it.
        module_path, _, attr = config.model.architecture.rpartition('.')
        model = getattr(import_module(module_path), attr)()
        # Prepend a 3->3 conv in front of the original ResNet stem.
        model.conv1 = nn.Sequential(
            nn.Conv2d(3, 3, kernel_size=(7, 7), stride=(2, 2),
                      padding=(3, 3), bias=False),
            model.conv1)
        out_features = 3 if num_classes is None else int(num_classes)
        model.fc = nn.Sequential(
            nn.Linear(in_features=512, out_features=128, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=128, out_features=32, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=32, out_features=16, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=16, out_features=out_features, bias=True)
        )
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        cv2.destroyAllWindows()
        gc.collect()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Classify one prepared sample and report the softmax confidence.

    Args:
        data: array-like model input (without the batch dimension).
        model: torch module returning class logits.
        mapping: dict mapping class index -> class-name string.
        shablon: suffix inserted into the log messages.

    Returns:
        [prediction, probability] or None on failure.
    """
    try:
        cv2.destroyAllWindows()
        gc.collect()
        torch.cuda.empty_cache()
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Bring logits to host memory first: np.asarray on a CUDA tensor raises.
        output = output.detach().cpu()
        label = int(torch.argmax(output, dim=1)[0])
        logits = np.asarray(torch.squeeze(output, 0))
        # Numerically stable softmax confidence for the winning class.
        expon = np.exp(logits - np.max(logits))
        probability = round(float((expon / expon.sum())[label]), 2)
        del logits, expon, output
        cv2.destroyAllWindows()
        gc.collect()
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the three input channels (real, imag, spectrogram) as grayscale PNGs.

    Files are named
    '<src>_inference_<ind_inference>_<prediction>_<tag>_<model_id>_<model_type>.png'
    with tag in ('real', 'imag', 'spec'). Returns None (also on failure).
    """
    def _save_channel(channel, tag):
        # One figure per channel, closed immediately to avoid figure leaks.
        fig, ax = plt.subplots()
        ax.imshow(channel, cmap='gray')
        plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction +
                    '_' + tag + '_' + str(model_id) + '_' + model_type + '.png')
        plt.clf()
        plt.cla()
        plt.close()

    try:
        matplotlib.use('Agg')  # headless backend
        plt.ioff()
        for idx, tag in enumerate(('real', 'imag', 'spec')):
            _save_channel(data[idx], tag)
        cv2.destroyAllWindows()
        gc.collect()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,206 @@
import torchsig.transforms.transforms as transform
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import matplotlib
import numpy as np
import mlconfig
import torch
import cv2
import gc
import io
def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build a 3-channel image (real waveform, imaginary waveform, spectrogram)
    from an I/Q sample pair for ResNet-18 inference.

    Args:
        data: sequence of two equal-length arrays: in-phase (real) and
            quadrature (imaginary) signal components.
        src, ind_inference: unused here; kept for interface compatibility
            with the other pre-processing variants.

    Returns:
        np.ndarray with channels [real, imag, spectrogram] on success,
        otherwise None.
    """
    def _curve_to_gray(samples, figsize, dpi):
        # Render a 1-D curve to an off-screen figure and decode it back as a
        # grayscale image array.
        fig = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        plt.plot(samples, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        raw = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        gray = cv2.cvtColor(cv2.imdecode(raw, 1), cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig)
        return gray

    try:
        matplotlib.use('Agg')  # headless rendering
        plt.ioff()
        figsize = (8, 8)
        dpi = 32
        # Direct complex arithmetic is vectorized in C; np.vectorize(complex)
        # loops element-by-element in Python.
        signal = np.asarray(data[0]) + 1j * np.asarray(data[1])
        spec = transform.Spectrogram(nperseg=256)
        # Trim the spectrogram width to the rendered image width.
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        img1 = _curve_to_gray(signal.real, figsize, dpi)
        img2 = _curve_to_gray(signal.imag, figsize, dpi)
        img = np.array([img1, img2, spectr])
        cv2.destroyAllWindows()
        # Release large intermediates before returning.
        del signal, spectr, img1, img2
        gc.collect()
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Construct a modified ResNet-18 classifier and load trained weights.

    Args:
        file_model: path to the saved state_dict.
        file_config: path to an mlconfig file whose `model.architecture` is a
            dotted path to the model constructor.
        num_classes: number of output classes; None keeps the historical
            hard-coded 3, so existing callers are unaffected.

    Returns:
        The model in eval mode on the available device, or None on failure.
    """
    try:
        matplotlib.use('Agg')  # headless backend for any later plotting
        plt.ioff()
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Resolve "pkg.module.ClassName" into the constructor and call it.
        module_path, _, attr = config.model.architecture.rpartition('.')
        model = getattr(import_module(module_path), attr)()
        # Prepend a 3->3 conv in front of the original ResNet stem.
        model.conv1 = nn.Sequential(
            nn.Conv2d(3, 3, kernel_size=(7, 7), stride=(2, 2),
                      padding=(3, 3), bias=False),
            model.conv1)
        out_features = 3 if num_classes is None else int(num_classes)
        model.fc = nn.Sequential(
            nn.Linear(in_features=512, out_features=128, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.7, inplace=False),
            nn.Linear(in_features=128, out_features=32, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.7, inplace=False),
            nn.Linear(in_features=32, out_features=16, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.7, inplace=False),
            nn.Linear(in_features=16, out_features=out_features, bias=True)
        )
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        cv2.destroyAllWindows()
        gc.collect()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Classify one prepared sample and report the softmax confidence.

    Args:
        data: array-like model input (without the batch dimension).
        model: torch module returning class logits.
        mapping: dict mapping class index -> class-name string.
        shablon: suffix inserted into the log messages.

    Returns:
        [prediction, probability] or None on failure.
    """
    try:
        cv2.destroyAllWindows()
        gc.collect()
        torch.cuda.empty_cache()
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Bring logits to host memory first: np.asarray on a CUDA tensor raises.
        output = output.detach().cpu()
        label = int(torch.argmax(output, dim=1)[0])
        logits = np.asarray(torch.squeeze(output, 0))
        # Numerically stable softmax confidence for the winning class.
        expon = np.exp(logits - np.max(logits))
        probability = round(float((expon / expon.sum())[label]), 2)
        del logits, expon, output
        cv2.destroyAllWindows()
        gc.collect()
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Save the three input channels (real, imag, spectrogram) as grayscale PNGs.

    Files are named
    '<src>_inference_<ind_inference>_<prediction>_<tag>_<model_id>_<model_type>.png'
    with tag in ('real', 'imag', 'spec'). Returns None (also on failure).
    """
    def _save_channel(channel, tag):
        # One figure per channel, closed immediately to avoid figure leaks.
        fig, ax = plt.subplots()
        ax.imshow(channel, cmap='gray')
        plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction +
                    '_' + tag + '_' + str(model_id) + '_' + model_type + '.png')
        plt.clf()
        plt.cla()
        plt.close()

    try:
        matplotlib.use('Agg')  # headless backend
        plt.ioff()
        for idx, tag in enumerate(('real', 'imag', 'spec')):
            _save_channel(data[idx], tag)
        cv2.destroyAllWindows()
        gc.collect()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,208 @@
import torchsig.transforms.dataset_transforms as transform
import torchsig.transforms.functional as F
from importlib import import_module
import matplotlib.pyplot as plt
import torch.nn as nn
import matplotlib
import numpy as np
import mlconfig
import torch
import cv2
import gc
import io
def pre_func_resnet18(data=None, src='', ind_inference=0):
    """Build a 3-channel image (real waveform, imaginary waveform, spectrogram)
    from an I/Q sample pair, archiving the raw complex signal as .npy.

    Args:
        data: sequence of two equal-length arrays: in-phase (real) and
            quadrature (imaginary) signal components.
        src: path prefix for the archived '<src>_inference_<idx>.npy' file.
        ind_inference: inference counter used in the archive file name.

    Returns:
        np.ndarray with channels [real, imag, spectrogram] on success,
        otherwise None.
    """
    def _curve_to_gray(samples, figsize, dpi):
        # Render a 1-D curve to an off-screen figure and decode it back as a
        # grayscale image array.
        fig = plt.figure(figsize=figsize)
        plt.axes(ylim=(-1, 1))
        plt.plot(samples, color='black')
        plt.gca().set_axis_off()
        plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
        plt.margins(0, 0)
        buf = io.BytesIO()
        fig.savefig(buf, format="png", dpi=dpi)
        buf.seek(0)
        raw = np.frombuffer(buf.getvalue(), dtype=np.uint8)
        buf.close()
        gray = cv2.cvtColor(cv2.imdecode(raw, 1), cv2.COLOR_BGR2GRAY)
        plt.clf()
        plt.cla()
        plt.close()
        plt.close(fig)
        return gray

    try:
        matplotlib.use('Agg')  # headless rendering
        plt.ioff()
        figsize = (8, 8)
        dpi = 32
        # Direct complex arithmetic is vectorized in C; np.vectorize(complex)
        # loops element-by-element in Python.
        signal = np.asarray(data[0]) + 1j * np.asarray(data[1])
        # Archive the raw complex signal for offline analysis.
        np.save(src + '_inference_' + str(ind_inference) + '.npy', signal)
        spec = transform.Spectrogram(nperseg=256, fft_size=32)
        # Trim the spectrogram width to the rendered image width.
        spectr = np.array(spec(signal)[:, :figsize[0] * dpi])
        img1 = _curve_to_gray(signal.real, figsize, dpi)
        img2 = _curve_to_gray(signal.imag, figsize, dpi)
        img = np.array([img1, img2, spectr])
        cv2.destroyAllWindows()
        # Release large intermediates before returning.
        del signal, spectr, img1, img2
        gc.collect()
        print('Подготовка данных завершена')
        print()
        return img
    except Exception as e:
        print(str(e))
        return None
def build_func_resnet18(file_model='', file_config='', num_classes=None):
    """Construct a modified ResNet-18 classifier and load trained weights.

    Args:
        file_model: path to the saved state_dict.
        file_config: path to an mlconfig file whose `model.architecture` is a
            dotted path to the model constructor.
        num_classes: number of output classes; None keeps the historical
            hard-coded 3, so existing callers are unaffected.

    Returns:
        The model in eval mode on the available device, or None on failure.
    """
    try:
        matplotlib.use('Agg')  # headless backend for any later plotting
        plt.ioff()
        torch.cuda.empty_cache()
        config = mlconfig.load(file_config)
        # Resolve "pkg.module.ClassName" into the constructor and call it.
        module_path, _, attr = config.model.architecture.rpartition('.')
        model = getattr(import_module(module_path), attr)()
        # Prepend a 3->3 conv in front of the original ResNet stem.
        model.conv1 = nn.Sequential(
            nn.Conv2d(3, 3, kernel_size=(7, 7), stride=(2, 2),
                      padding=(3, 3), bias=False),
            model.conv1)
        out_features = 3 if num_classes is None else int(num_classes)
        model.fc = nn.Sequential(
            nn.Linear(in_features=512, out_features=128, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.7, inplace=False),
            nn.Linear(in_features=128, out_features=32, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.7, inplace=False),
            nn.Linear(in_features=32, out_features=16, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.7, inplace=False),
            nn.Linear(in_features=16, out_features=out_features, bias=True)
        )
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        if device != 'cpu':
            model = model.to(device)
        model.load_state_dict(torch.load(file_model, map_location=device))
        model.eval()
        cv2.destroyAllWindows()
        gc.collect()
        print('Инициализация модели завершена')
        print()
        return model
    except Exception as exc:
        print(str(exc))
        return None
def inference_func_resnet18(data=None, model=None, mapping=None, shablon=''):
    """Classify one prepared sample and report the softmax confidence.

    Args:
        data: array-like model input (without the batch dimension).
        model: torch module returning class logits.
        mapping: dict mapping class index -> class-name string.
        shablon: suffix inserted into the log messages.

    Returns:
        [prediction, probability] or None on failure.
    """
    try:
        cv2.destroyAllWindows()
        gc.collect()
        torch.cuda.empty_cache()
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        img = torch.unsqueeze(torch.tensor(data), 0).to(device)
        with torch.no_grad():
            output = model(img.float())
        _, predict = torch.max(output.data, 1)
        prediction = mapping[int(np.asarray(predict.cpu())[0])]
        print('PREDICTION' + shablon + ': ' + prediction)
        # Bring logits to host memory first: np.asarray on a CUDA tensor raises.
        output = output.detach().cpu()
        label = int(torch.argmax(output, dim=1)[0])
        logits = np.asarray(torch.squeeze(output, 0))
        # Numerically stable softmax confidence for the winning class.
        expon = np.exp(logits - np.max(logits))
        probability = round(float((expon / expon.sum())[label]), 2)
        del logits, expon, output
        cv2.destroyAllWindows()
        gc.collect()
        print('Уверенность' + shablon + ' в предсказании: ' + str(probability))
        print('Инференс завершен')
        print()
        return [prediction, probability]
    except Exception as exc:
        print(str(exc))
        return None
def post_func_resnet18(src='', model_type='', prediction='', model_id=0, ind_inference=0, data=None):
    """Render the three diagnostic images (real, imag, spectrum) for one inference.

    Args:
        src: path prefix for the output PNG files.
        model_type: model-type tag appended to the file name.
        prediction: predicted class name embedded in the file name.
        model_id: numeric model identifier embedded in the file name.
        ind_inference: running inference counter embedded in the file name.
        data: sequence of three 2-D arrays: [real, imag, spectrogram].

    Returns:
        None; errors are printed, never raised.
    """
    try:
        matplotlib.use('Agg')
        plt.ioff()
        # One pass per image component; the original had three copy-pasted
        # blocks differing only in the data index and the name suffix.
        for component, suffix in zip(data, ('real', 'imag', 'spec')):
            fig, ax = plt.subplots()
            ax.imshow(component, cmap='gray')
            plt.savefig(src + '_inference_' + str(ind_inference) + '_' + prediction + '_' + suffix + '_' + str(
                model_id) + '_' + model_type + '.png')
            plt.clf()
            plt.cla()
            plt.close()
        del fig
        del ax
        cv2.destroyAllWindows()
        gc.collect()
        print('Постобработка завершена')
        print()
    except Exception as exc:
        print(str(exc))
        return None

@ -0,0 +1,312 @@
from flask import Flask, request, jsonify
from dotenv import dotenv_values
from common.runtime import load_root_env, validate_env, as_int, as_str
import matplotlib.pyplot as plt
from Model import Model
import numpy as np
import matplotlib
import importlib
import threading
import requests
import asyncio
import shutil
import json
import gc
import os
import logging
logging.basicConfig(level=logging.INFO)
app = Flask(__name__)
# Async machinery below is kept for the queued-inference path that is
# currently disabled (see the large commented-out block further down).
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
queue = asyncio.Queue()
semaphore = asyncio.Semaphore(3)
prediction_list = []
result_msg = {}
results = []
# Headless plotting: this server only renders figures to files.
matplotlib.use('Agg')
plt.ioff()
alg_list = []
model_list = []
# Fail fast at import time: the root .env must exist and contain every
# required key with a parseable value.
ROOT_ENV = load_root_env(__file__)
validate_env("NN_server/server.py", {
    "GENERAL_SERVER_IP": as_str,
    "GENERAL_SERVER_PORT": as_int,
    "SERVER_IP": as_str,
    "SERVER_PORT": as_int,
    "PATH_TO_NN": as_str,
    "SRC_RESULT": as_str,
    "SRC_EXAMPLE": as_str,
})
# Raw key/value view of the same .env; NN_* entries describe the models.
config = dict(dotenv_values(ROOT_ENV))
if not config:
    raise RuntimeError("[NN_server/server.py] .env was loaded but no keys were parsed")
if not any(key.startswith("NN_") for key in config):
    raise RuntimeError("[NN_server/server.py] no NN_* model entries configured")
logging.info("NN config loaded from %s", ROOT_ENV)
gen_server_ip = config['GENERAL_SERVER_IP']
gen_server_port = config['GENERAL_SERVER_PORT']
def init_data_for_inference():
    """Reset result/example directories and build the model list from NN_* .env entries."""
    try:
        # Wipe and recreate the output directories so each run starts clean.
        if os.path.isdir(config['SRC_RESULT']):
            shutil.rmtree(config['SRC_RESULT'])
        os.mkdir(config['SRC_RESULT'])
        if os.path.isdir(config['SRC_EXAMPLE']):
            shutil.rmtree(config['SRC_EXAMPLE'])
        os.mkdir(config['SRC_EXAMPLE'])
    except Exception as exc:
        print(str(exc))
        print()
    try:
        global model_list
        for key in config.keys():
            if key.startswith('NN_'):
                # Each NN_* value is a ' && '-separated positional record:
                # 0 model file, 1 config file, 2 example dir, 3 result dir,
                # 4 module/type name, 5-8 builder/pre/inference/post function
                # names, 9 class list like "[a,b,c]", 10 synthetic example
                # count, 11 sources per synthetic example, 12 dataset path.
                params = config[key].split(' && ')
                module = importlib.import_module('Models.' + params[4])
                classes = {}
                # params[9] looks like "[cls1,cls2,...]" -> index -> name map.
                for value in params[9][1:-1].split(','):
                    classes[len(classes)] = value
                model = Model(file_model=params[0], file_config=params[1], src_example=params[2], src_result=params[3],
                              type_model=params[4], build_model_func=getattr(module, params[5]),
                              pre_func=getattr(module, params[6]), inference_func=getattr(module, params[7]),
                              post_func=getattr(module, params[8]), classes=classes, number_synthetic_examples=int(params[10]),
                              number_src_data_for_one_synthetic_example=int(params[11]), path_to_src_dataset=params[12])
                model_list.append(model)
            # if key.startswith('ALG_'):
            #     params = config[key].split(' && ')
            #     module = importlib.import_module('Algorithms.' + params[2])
            #     classes = {}
            #     for value in params[6][1:-1].split(','):
            #         classes[len(classes)] = value
            #     alg = Algorithm(src_example=params[0], src_result=params[1], type_alg=params[2], pre_func=getattr(module, params[3]),
            #                     inference_func=getattr(module, params[4]), post_func=getattr(module, params[5]), classes=classes,
            #                     number_synthetic_examples=int(params[7]), number_src_data_for_one_synthetic_example=int(params[8]), path_to_src_dataset=params[9])
            #     alg_list.append(alg)
    except Exception as exc:
        print(str(exc))
        print()
def run_example():
    """Warm up every configured model by running its bundled test inference."""
    try:
        for configured_model in model_list:
            configured_model.get_test_inference()
    except Exception as exc:
        print(str(exc))
@app.route('/receive_data', methods=['POST'])
def receive_data():
    """Synchronous inference endpoint: route one IQ packet to the matching model
    and forward the aggregated detection level to the general server."""
    try:
        print()
        # The POST body is a JSON *string* (double-encoded by the senders).
        data = json.loads(request.json)
        print('#' * 100)
        print('Получен пакет ' + str(Model.get_ind_inference()))
        freq = int(data['freq'])
        print('Частота: ' + str(freq))
        # print('Канал: ' + str(data['channel']))
        result_msg = {}
        data_to_send = {}
        prediction_list = []
        for model in model_list:
            # Band routing: model names embed their frequency in MHz.
            if str(freq) in model.get_model_name():
                print('-' * 100)
                print(str(model))
                result_msg[str(model.get_model_name())] = {'freq': freq}
                prediction, probability = model.get_inference([np.asarray(data['data_real'], dtype=np.float32), np.asarray(data['data_imag'], dtype=np.float32)])
                result_msg[str(model.get_model_name())]['prediction'] = prediction
                result_msg[str(model.get_model_name())]['probability'] = str(probability)
                prediction_list.append(prediction)
                print('-' * 100)
                print()
                try:
                    # `result` is the LED strip level sent downstream:
                    # 8 = alarm, 0 = quiet.
                    result = 0
                    if (int(freq) == 2400 and (prediction_list[0] in ['drone', 'drone_noise'] or (prediction_list[0] == 'wifi' and float(probability) >= 0.95))) or (int(freq) == 1200 and (prediction_list[0] in ['drone'] and float(probability) >= 0.95)):
                        result += 8
                    # 915 MHz detections are deliberately suppressed.
                    if int(freq) in [915]:
                        result = 0
                    # Placeholder for bands forced to always-alarm (none now).
                    if int(freq) in []:
                        result = 8
                    data_to_send={
                        'freq': str(freq),
                        'amplitude': result
                        #'triggered': False if result < 7 else True,
                        #'light_len': result
                    }
                    response = requests.post("http://{0}:{1}/process_data".format(gen_server_ip, gen_server_port), json=data_to_send)
                    if response.status_code == 200:
                        print("Данные успешно отправлены!")
                        print("Частота: " + str(freq))
                        print("Отправлено светодиодов: " + str(result))
                    else:
                        print("Ошибка при отправке данных: ", response.status_code)
                except Exception as exc:
                    print(str(exc))
                # Only the first matching model handles a packet.
                break
        Model.get_inc_ind_inference()
        print()
        print('#' * 100)
        # alg_list is currently always empty (ALG_ loading is commented out).
        for alg in alg_list:
            print('-' * 100)
            print(str(alg))
            alg.get_inference([np.asarray(data['data_real'], dtype=np.float32), np.asarray(data['data_imag'], dtype=np.float32)])
            print('-' * 100)
            print()
        #Algorithm.get_inc_ind_inference()
        print()
        print('#' * 100)
        del data
        gc.collect()
        return jsonify(result_msg)
    except Exception as exc:
        # NOTE(review): this path returns None, which Flask rejects with a
        # 500/TypeError — consider returning jsonify({}) here; confirm the
        # desired failure contract with the senders.
        print(str(exc))
'''
def run_flask():
app.run(host=config['SERVER_IP'], port=int(config['SERVER_PORT']))
async def process_tasks():
workers = [asyncio.create_task(worker(queue=queue, semaphore=semaphore)) for _ in range(2)]
await asyncio.gather(*workers)
async def main():
asyncio.create_task(process_tasks())
flask_thread = threading.Thread(target=run_flask)
flask_thread.start()
while True:
if queue.qsize() <= 1:
asyncio.create_task(process_tasks())
await asyncio.sleep(1)
@app.route('/receive_data', methods=['POST'])
def add_task():
queue_size = queue.qsize()
if queue_size > 1:
return {}
print()
data = json.loads(request.json)
print('#' * 100)
print('Получен пакет ' + str(Model.get_ind_inference()))
freq = int(data['freq'])
print('Частота ' + str(freq))
result_msg = {}
for model in model_list:
if str(freq) in model.get_model_name():
print('-' * 100)
print(str(model))
result_msg[str(model.get_model_name())] = {'freq': freq}
asyncio.run_coroutine_threadsafe(queue.put({'freq': freq, 'model': model, 'data': data}), loop)
do_inference(model=model, data=data, freq=freq)
break
del data
gc.collect()
return jsonify(result_msg)
async def worker(queue, semaphore):
while True:
task = await queue.get()
if task is None:
break
async with semaphore:
try:
await do_inference(model=task['model'], data=task['data'], freq=task['freq'])
except Exception as e:
print(str(e))
print(results)
queue.task_done()
async def do_inference(model=None, data=None, freq=0):
prediction_list = []
print("Длина очереди" + str(queue.qsize()))
inference(model=model, data=data, freq=freq)
try:
results = []
for pred in prediction_list:
if pred[1] == 'drone':
results.append([pred[0],8])
else:
results.append([pred[0],0])
for result in results:
try:
data_to_send={
'freq': result[0],
'amplitude': result[1],
'triggered': False if result[1] < 7 else True,
'light_len': result[1]
}
response = requests.post("http://{0}:{1}/process_data".format(gen_server_ip, gen_server_port), json=data_to_send)
await response.text
if response.status_code == 200:
print("Данные успешно отправлены!")
print("Отправлено светодиодов: " + str(data_to_send['light_len']))
else:
print("Ошибка при отправке данных: ", response.status_code)
except Exception as exc:
print(str(exc))
except Exception as exc:
print(str(exc))
Model.get_inc_ind_inference()
print()
print('#' * 100)
del data
gc.collect()
def inference(model=None, data=None, freq=0):
prediction, probability = model.get_inference([np.asarray(data['data_real'], dtype=np.float32), np.asarray(data['data_imag'], dtype=np.float32)])
result_msg[str(model.get_model_name())]['prediction'] = prediction
result_msg[str(model.get_model_name())]['probability'] = str(probability)
queue_size = queue.qsize()
print(queue_size)
prediction_list.append([freq, prediction])
print('-' * 100)
print()
if __name__ == '__main__':
init_data_for_inference()
#asyncio.run(main)
loop.run_until_complete(main())
'''
def run_flask():
    """Start the Flask app on the host/port configured in the root .env."""
    bind_host = config['SERVER_IP']
    print(bind_host)
    app.run(host=bind_host, port=int(config['SERVER_PORT']))
if __name__ == '__main__':
    # Reset working dirs and load the configured models, then serve Flask
    # from a separate thread (the original in-process app.run is kept below
    # for reference).
    init_data_for_inference()
    flask_thread = threading.Thread(target=run_flask)
    flask_thread.start()
    #app.run(host=config['SERVER_IP'], port=int(config['SERVER_PORT']))

@ -0,0 +1,130 @@
# DroneDetector v2
Отдельный проектный контур (без миграции legacy), в котором:
- SDR-сканеры работают **на хосте** под `systemd`;
- `server_to_master` и `NN_server` работают в **Docker Compose**;
- весь runtime-конфиг хранится в **одном корневом `.env`**.
## 1. Быстрый старт
### Prerequisites
- Ubuntu/Debian (apt)
- NVIDIA GPU + установленный драйвер (`nvidia-smi` должен работать)
- Интернет для установки пакетов и сборки Docker-образов
- HackRF + GNU Radio стек (будет установлен через `install_all.sh`)
### Установка и запуск
```bash
cd /home/sibscience-4/from_ssh/DroneDetector
chmod +x install_all.sh
./install_all.sh
```
`install_all.sh`:
1. выполняет preflight;
2. ставит host non-python зависимости SDR;
3. настраивает Docker + NVIDIA runtime;
4. поднимает compose сервисы;
5. устанавливает/перезапускает `systemd` unit'ы;
6. проверяет статус, при ошибке печатает логи.
## 2. Матрица сервисов
### Host / systemd (SDR)
- `dronedetector-sdr-433.service` -> `src/main_433.py`
- `dronedetector-sdr-750.service` -> `src/main_750.py`
- `dronedetector-sdr-868.service` -> `src/main_868.py`
- `dronedetector-sdr-3300.service` -> `src/main_3300.py`
- `dronedetector-sdr-4500.service` -> `src/main_4500.py`
- `dronedetector-sdr-5200.service` -> `src/main_5200.py`
- `dronedetector-sdr-5800.service` -> `src/main_5800.py`
- `dronedetector-sdr-915.service` -> `orange_scripts/main_915.py`
- `dronedetector-sdr-1200.service` -> `orange_scripts/main_1200.py`
- `dronedetector-sdr-2400.service` -> `orange_scripts/main_2400.py`
### Docker Compose
- `dronedetector-server-to-master` -> `src/server_to_master.py`
- `dronedetector-nn-server` -> `NN_server/server.py`
Compose unit:
- `dronedetector-compose.service`
## 3. Конфигурация
Единственный источник runtime-конфига: `./.env`.
Все entrypoint'ы загружают root `.env` через `common/runtime.py` и валидируют обязательные переменные. При ошибке сервис падает сразу с понятным сообщением.
## 4. API (без изменения контрактов)
- NN_server: `POST /receive_data`
- server_to_master: `POST /process_data`
Форматы payload/ответов сохранены в текущей логике сервисов.
## 5. Диагностика
### systemd
```bash
systemctl status dronedetector-sdr-*.service
journalctl -u dronedetector-sdr-868.service -n 200 --no-pager
systemctl status dronedetector-compose.service
journalctl -u dronedetector-compose.service -n 200 --no-pager
```
### docker compose
```bash
docker compose -f deploy/docker/docker-compose.yml ps
docker compose -f deploy/docker/docker-compose.yml logs dronedetector-server-to-master
docker compose -f deploy/docker/docker-compose.yml logs dronedetector-nn-server
```
## 6. Host non-python dependencies
Устанавливаются `install_all.sh`:
- GNU Radio
- gr-osmosdr
- libhackrf/hackrf-tools (`hackrf` package)
- libusb
- udev-related runtime via distro packages
SDR precheck перед каждым unit запуском:
- наличие `hackrf_info`
- наличие `gnuradio-config-info`
- импорт `osmosdr`
- детект устройства HackRF
## 7. install_all.sh: параметры и поведение
Скрипт idempotent: повторный запуск допустим.
Что делает:
- preflight (OS, диск, `.env`, GPU)
- host deps
- `.venv-sdr` c `--system-site-packages`
- Docker Engine (если отсутствует)
- NVIDIA Container Toolkit
- `docker compose up -d --build`
- установка unit'ов в `/etc/systemd/system`
- verify + авто-логи при ошибке
## 8. Типовые ошибки `.env`
Примеры fail-fast сообщений:
- `[src/server_to_master.py] invalid .env configuration: ...`
- `[NN_server/server.py] no NN_* model entries configured`
- `[orange_scripts/compose_send_data_915.py] invalid .env configuration: ...`
Частые причины:
- пустое обязательное поле (`SERVER_PORT`, `lochost`, `hack_868` и т.д.)
- неверный тип (`SERVER_PORT=abc`)
- неправильный serial HackRF (не найден среди `lsusb -v -d 1d50:6089 | grep iSerial`)
## 9. Ручная приемка
1. `./install_all.sh` выполняется до конца.
2. `docker compose -f deploy/docker/docker-compose.yml up -d` поднимает оба контейнера.
3. Все `dronedetector-sdr-*` имеют `active (running)`.
4. Тестовый POST в `NN_server /receive_data` доходит до `server_to_master /process_data`.
5. Контур работает минимум 1 минуту без падений.

@ -0,0 +1 @@
"""Shared runtime helpers for DroneDetector."""

@ -0,0 +1,99 @@
import os
import subprocess
from pathlib import Path
from typing import Callable, Dict, Any
from dotenv import load_dotenv
class EnvValidationError(RuntimeError):
    """Signals that mandatory environment configuration is absent or malformed."""
def load_root_env(file_path: str) -> Path:
    """Load the repository root ``.env`` by walking up from *file_path*.

    Walks the ancestor directories of *file_path*, loads the first ``.env``
    found (overriding already-set process variables) and returns its path.

    Raises:
        EnvValidationError: when no ``.env`` exists on the ancestor chain.
    """
    start = Path(file_path).resolve()
    # Path.parents already starts with start.parent, so the original
    # [start.parent, *start.parents] probed the same directory twice.
    for parent in start.parents:
        env_file = parent / ".env"
        if env_file.exists():
            load_dotenv(env_file, override=True)
            return env_file
    raise EnvValidationError(f"Root .env was not found for {file_path}")
def as_int(raw: str) -> int:
    """Parse *raw* as a base-10 integer, tolerating surrounding whitespace."""
    text = str(raw).strip()
    return int(text)
def as_float(raw: str) -> float:
    """Parse *raw* as a float, tolerating surrounding whitespace."""
    text = str(raw).strip()
    return float(text)
def as_str(raw: str) -> str:
    """Normalize an env value: strip whitespace and one matched pair of quotes.

    Edge-case fix: a value that is a single quote character used to satisfy
    both startswith and endswith and collapse to ``""``; a minimum length of
    two is now required before stripping.
    """
    value = str(raw).strip()
    if len(value) >= 2 and value.startswith("\"") and value.endswith("\""):
        value = value[1:-1]
    if len(value) >= 2 and value.startswith("'") and value.endswith("'"):
        value = value[1:-1]
    return value
def as_bool(raw: str) -> bool:
    """Interpret an env value as a boolean flag.

    Accepts the usual truthy/falsy tokens (1/0, true/false, yes/no, y/n,
    on/off) case-insensitively; anything else raises ValueError.
    """
    # Normalization inlined from as_str: trim, drop one matched quote pair.
    token = str(raw).strip()
    if token.startswith("\"") and token.endswith("\""):
        token = token[1:-1]
    if token.startswith("'") and token.endswith("'"):
        token = token[1:-1]
    token = token.lower()
    if token in {"1", "true", "yes", "y", "on"}:
        return True
    if token in {"0", "false", "no", "n", "off"}:
        return False
    raise ValueError("expected one of 1/0 true/false yes/no on/off")
def validate_env(source: str, schema: Dict[str, Callable[[str], Any]]) -> Dict[str, Any]:
    """Check that every variable named in *schema* is set and castable.

    Collects all problems (missing or uncastable values) before raising so
    a misconfigured .env reports everything at once.

    Returns:
        Mapping of variable name to its cast value.

    Raises:
        EnvValidationError: listing every missing/invalid variable.
    """
    parsed: Dict[str, Any] = {}
    problems = []
    for name, caster in schema.items():
        raw = os.getenv(name)
        if raw is None or str(raw).strip() == "":
            problems.append(f"{name}: missing")
            continue
        try:
            parsed[name] = caster(raw)
        except Exception as exc:  # pragma: no cover - used in runtime only
            problems.append(f"{name}: invalid value {raw!r} ({exc})")
    if problems:
        details = "\n - " + "\n - ".join(problems)
        raise EnvValidationError(f"[{source}] invalid .env configuration:{details}")
    return parsed
def resolve_hackrf_index(serial_env_key: str, source: str) -> str:
    """Resolve HackRF index from expected serial in env.

    The env var named by *serial_env_key* holds the expected device serial;
    the returned string is that device's position in the lsusb listing,
    used downstream as the osmosdr device index.
    """
    serial = validate_env(source, {serial_env_key: as_str})[serial_env_key]
    try:
        # lsusb filtered to the HackRF VID:PID (1d50:6089); each matching
        # iSerial line ends with the device serial number.
        output = subprocess.check_output(
            "lsusb -v -d 1d50:6089 | grep iSerial",
            shell=True,
            text=True,
        )
    except subprocess.CalledProcessError as exc:
        raise EnvValidationError(
            f"[{source}] could not read HackRF serials via lsusb: {exc}"
        ) from exc
    lines = [line.strip() for line in output.splitlines() if line.strip()]
    if not lines:
        raise EnvValidationError(
            f"[{source}] no HackRF devices found (lsusb returned empty serial list)"
        )
    # Last whitespace-separated token of each iSerial line is the serial.
    serials = [line.split()[-1] for line in lines]
    if serial not in serials:
        raise EnvValidationError(
            f"[{source}] serial {serial!r} not found among connected HackRF devices: {serials}"
        )
    return str(serials.index(serial))

@ -0,0 +1,34 @@
# GPU inference container for NN_server (CUDA 12.8 + cuDNN runtime base).
FROM nvidia/cuda:12.8.1-cudnn-runtime-ubuntu22.04
ENV DEBIAN_FRONTEND=noninteractive \
    PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONPATH=/app
WORKDIR /app
# libglib/libsm/libxext/libxrender are runtime deps of opencv-python-headless.
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 \
    python3-pip \
    python3-venv \
    git \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    && rm -rf /var/lib/apt/lists/*
# Install requirements before copying sources so the layer cache survives
# code-only changes.
COPY deploy/requirements/nn_gpu_pinned.txt /tmp/nn_gpu_pinned.txt
COPY deploy/requirements/nn_common.txt /tmp/nn_common.txt
RUN python3 -m pip install --no-cache-dir --upgrade pip && \
    python3 -m pip install --no-cache-dir -r /tmp/nn_gpu_pinned.txt && \
    python3 -m pip install --no-cache-dir -r /tmp/nn_common.txt
COPY . /app
# NOTE(review): torchsig/ is listed in .gitignore — confirm the build
# context actually contains it, otherwise this step fails on a fresh clone.
RUN python3 -m pip install --no-cache-dir -e /app/torchsig
# (redundant duplicate `WORKDIR /app` removed; it was set above)
EXPOSE 8080
CMD ["python3", "-m", "NN_server.server"]

@ -0,0 +1,20 @@
# Lightweight container for the master-side aggregation service.
FROM python:3.11-slim
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONPATH=/app
WORKDIR /app
# gcc is needed to build any wheels without prebuilt binaries.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    && rm -rf /var/lib/apt/lists/*
# Requirements first: keeps the pip layer cached across code-only changes.
COPY deploy/requirements/server_to_master.txt /tmp/requirements.txt
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r /tmp/requirements.txt
COPY . /app
EXPOSE 5000
CMD ["python3", "-m", "src.server_to_master"]

@ -0,0 +1,43 @@
services:
  # Aggregator that receives processed detections from the NN server.
  dronedetector-server-to-master:
    container_name: dronedetector-server-to-master
    build:
      context: ../..
      dockerfile: deploy/docker/Dockerfile.server_to_master
    env_file:
      - ../../.env
    environment:
      - PYTHONPATH=/app
    working_dir: /app
    command: ["python3", "-m", "src.server_to_master"]
    restart: unless-stopped
    ports:
      - "5000:5000"
    networks:
      - dronedetector-net
  # GPU inference service; host-side SDR scanners POST IQ captures here.
  dronedetector-nn-server:
    container_name: dronedetector-nn-server
    build:
      context: ../..
      dockerfile: deploy/docker/Dockerfile.nn_server
    env_file:
      - ../../.env
    environment:
      - PYTHONPATH=/app
    working_dir: /app
    command: ["python3", "-m", "NN_server.server"]
    restart: unless-stopped
    depends_on:
      - dronedetector-server-to-master
    ports:
      - "8080:8080"
    volumes:
      # Persist rendered inference images on the host.
      - ../../NN_server/result:/app/NN_server/result
    # NOTE(review): the service-level `gpus` attribute requires a recent
    # Docker Compose; older releases need
    # deploy.resources.reservations.devices instead — confirm the installed
    # compose version supports it.
    gpus: all
    networks:
      - dronedetector-net
networks:
  dronedetector-net:
    name: dronedetector-net

@ -0,0 +1,11 @@
flask==3.1.0
python-dotenv==1.0.1
numpy==2.1.3
matplotlib==3.10.0
tqdm==4.67.1
requests==2.32.3
pyyaml==6.0.2
mlconfig==0.3.2
scikit-learn==1.6.0
torchensemble==0.2.0
opencv-python-headless==4.10.0.84

@ -0,0 +1,6 @@
--index-url https://download.pytorch.org/whl/cu128
--extra-index-url https://pypi.org/simple
torch==2.10.0+cu128
torchvision==0.25.0+cu128
torchaudio==2.10.0+cu128

@ -0,0 +1,6 @@
python-dotenv==1.0.1
numpy==1.26.4
requests==2.32.3
pysmb==1.2.10
pynmea2==1.19.0
pyserial==3.5

@ -0,0 +1,6 @@
fastapi==0.115.6
uvicorn[standard]==0.32.1
httpx==0.28.1
requests==2.32.3
websockets==12.0
python-dotenv==1.0.1

@ -0,0 +1,17 @@
[Unit]
Description=DroneDetector Docker Compose Services
After=network-online.target docker.service
Wants=network-online.target
Requires=docker.service
[Service]
Type=oneshot
WorkingDirectory=__PROJECT_ROOT__
RemainAfterExit=yes
ExecStart=/usr/bin/docker compose -f __PROJECT_ROOT__/deploy/docker/docker-compose.yml up -d --build
ExecStop=/usr/bin/docker compose -f __PROJECT_ROOT__/deploy/docker/docker-compose.yml down
ExecReload=/usr/bin/docker compose -f __PROJECT_ROOT__/deploy/docker/docker-compose.yml up -d --build
TimeoutStartSec=0
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 1200 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python orange_scripts/main_1200.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 2400 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python orange_scripts/main_2400.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 3300 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python src/main_3300.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 433 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python src/main_433.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 4500 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python src/main_4500.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 5200 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python src/main_5200.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 5800 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python src/main_5800.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 750 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python src/main_750.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 868 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python src/main_868.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,19 @@
[Unit]
Description=DroneDetector SDR Scanner 915 MHz
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=__RUN_USER__
Group=__RUN_GROUP__
WorkingDirectory=__PROJECT_ROOT__
EnvironmentFile=__PROJECT_ROOT__/.env
Environment=PYTHONPATH=__PROJECT_ROOT__
ExecStartPre=/usr/local/bin/dronedetector-precheck-sdr.sh
ExecStart=__PROJECT_ROOT__/.venv-sdr/bin/python orange_scripts/main_915.py
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

@ -0,0 +1,22 @@
#!/usr/bin/env bash
# Pre-start gate for every dronedetector-sdr-* unit: verify the SDR
# toolchain and an attached HackRF before the scanner process launches.
set -euo pipefail
if ! command -v hackrf_info >/dev/null 2>&1; then
    echo "[dronedetector-precheck] hackrf_info not found. Install hackrf-tools/hackrf package." >&2
    exit 1
fi
if ! command -v gnuradio-config-info >/dev/null 2>&1; then
    echo "[dronedetector-precheck] gnuradio-config-info not found. Install gnuradio." >&2
    exit 1
fi
# gr-osmosdr python bindings must be importable by the system python.
if ! python3 -c "import osmosdr" >/dev/null 2>&1; then
    echo "[dronedetector-precheck] Python module osmosdr not importable." >&2
    exit 1
fi
# Finally require that at least one HackRF is physically detected.
if ! hackrf_info 2>/dev/null | grep -q "Found HackRF"; then
    echo "[dronedetector-precheck] HackRF device was not detected by hackrf_info." >&2
    exit 1
fi

@ -0,0 +1,246 @@
#!/usr/bin/env bash
set -Eeuo pipefail
PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
COMPOSE_FILE="${PROJECT_ROOT}/deploy/docker/docker-compose.yml"
SYSTEMD_TARGET_DIR="/etc/systemd/system"
RUN_USER="${SUDO_USER:-${USER}}"
RUN_GROUP="$(id -gn "${RUN_USER}")"
SDR_UNITS=(
dronedetector-sdr-433.service
dronedetector-sdr-750.service
dronedetector-sdr-868.service
dronedetector-sdr-3300.service
dronedetector-sdr-4500.service
dronedetector-sdr-5200.service
dronedetector-sdr-5800.service
dronedetector-sdr-915.service
dronedetector-sdr-1200.service
dronedetector-sdr-2400.service
)
log() {
    # Uniform stdout logging with an [install_all] prefix.
    printf '[install_all] %s\n' "$*"
}
die() {
    # Print an error to stderr and abort the whole installer.
    printf '[install_all] ERROR: %s\n' "$*" >&2
    exit 1
}
print_failure_logs() {
    # Best-effort diagnostics dump; every command is `|| true` so the
    # collector itself never masks the original failure.
    log "Collecting diagnostics..."
    systemctl --no-pager --full status dronedetector-compose.service || true
    for unit in "${SDR_UNITS[@]}"; do
        systemctl --no-pager --full status "$unit" || true
    done
    if command -v docker >/dev/null 2>&1; then
        docker compose -f "$COMPOSE_FILE" ps || true
        docker compose -f "$COMPOSE_FILE" logs --tail=150 dronedetector-server-to-master || true
        docker compose -f "$COMPOSE_FILE" logs --tail=150 dronedetector-nn-server || true
    fi
    journalctl -u dronedetector-compose.service -n 150 --no-pager || true
    for unit in "${SDR_UNITS[@]}"; do
        journalctl -u "$unit" -n 120 --no-pager || true
    done
}
trap 'rc=$?; if [[ $rc -ne 0 ]]; then print_failure_logs; fi' EXIT
require_root() {
    # Re-exec the whole script under sudo (preserving env) unless already root.
    if [[ "${EUID}" -ne 0 ]]; then
        log "Switching to root via sudo..."
        exec sudo -E bash "$0" "$@"
    fi
}
preflight() {
    # Sanity checks before mutating the host: config present, apt-based
    # distro, at least 10 GB free disk, usable NVIDIA driver.
    log "Preflight checks"
    [[ -f "${PROJECT_ROOT}/.env" ]] || die "Missing ${PROJECT_ROOT}/.env"
    [[ -f "${COMPOSE_FILE}" ]] || die "Missing ${COMPOSE_FILE}"
    if ! command -v apt-get >/dev/null 2>&1; then
        die "This installer currently supports Debian/Ubuntu only (apt-get required)."
    fi
    local free_mb
    # df -Pm: POSIX output in MiB; row 2, column 4 is the available space.
    free_mb="$(df -Pm "${PROJECT_ROOT}" | awk 'NR==2 {print $4}')"
    if [[ -z "$free_mb" || "$free_mb" -lt 10240 ]]; then
        die "At least 10 GB free disk space is required."
    fi
    if ! command -v nvidia-smi >/dev/null 2>&1; then
        die "nvidia-smi is required. GPU/NVIDIA driver is not available on host."
    fi
    log "Preflight OK"
}
install_host_non_python_deps() {
    # Host-level SDR and build tooling (GNU Radio, gr-osmosdr, HackRF, libusb).
    log "Installing host non-python dependencies"
    apt-get update
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        gnupg \
        lsb-release \
        jq \
        git \
        python3 \
        python3-pip \
        python3-venv \
        build-essential \
        pkg-config \
        libusb-1.0-0 \
        libusb-1.0-0-dev \
        hackrf \
        gnuradio \
        gr-osmosdr
}
setup_sdr_python_env() {
    # Venv with --system-site-packages so the apt-installed gnuradio/osmosdr
    # python bindings remain importable from inside the venv.
    log "Setting up SDR python environment"
    local venv_path="${PROJECT_ROOT}/.venv-sdr"
    if [[ ! -d "$venv_path" ]]; then
        python3 -m venv --system-site-packages "$venv_path"
    fi
    "$venv_path/bin/pip" install --upgrade pip
    "$venv_path/bin/pip" install -r "${PROJECT_ROOT}/deploy/requirements/sdr_host.txt"
    # The installer runs as root; hand the venv back to the service user.
    chown -R "${RUN_USER}:${RUN_GROUP}" "$venv_path"
}
install_docker_if_needed() {
    # Install Docker Engine from the official apt repository; no-op when
    # docker is already present.
    if command -v docker >/dev/null 2>&1; then
        log "Docker already installed"
        return
    fi
    log "Installing Docker Engine"
    . /etc/os-release
    local distro_id="${ID}"
    if [[ "$distro_id" != "ubuntu" && "$distro_id" != "debian" ]]; then
        die "Unsupported distro for Docker auto-install: ${distro_id}"
    fi
    install -m 0755 -d /etc/apt/keyrings
    # FIX: --batch --yes keeps gpg non-interactive and lets a re-run
    # overwrite an existing keyring file instead of failing, preserving the
    # installer's documented idempotency.
    curl -fsSL "https://download.docker.com/linux/${distro_id}/gpg" | gpg --batch --yes --dearmor -o /etc/apt/keyrings/docker.gpg
    chmod a+r /etc/apt/keyrings/docker.gpg
    echo \
        "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/${distro_id} \
        ${VERSION_CODENAME} stable" | tee /etc/apt/sources.list.d/docker.list >/dev/null
    apt-get update
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
    systemctl enable --now docker
}
install_nvidia_container_toolkit() {
    # Install (if missing) and configure the NVIDIA Container Toolkit so
    # docker can expose GPUs to containers.
    log "Installing/Configuring NVIDIA Container Toolkit"
    if ! dpkg -s nvidia-container-toolkit >/dev/null 2>&1; then
        # FIX: --batch --yes lets a re-run overwrite an existing keyring
        # file instead of aborting (idempotency).
        curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | \
            gpg --batch --yes --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg
        curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \
            sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \
            tee /etc/apt/sources.list.d/nvidia-container-toolkit.list
        apt-get update
        DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends nvidia-container-toolkit
    fi
    nvidia-ctk runtime configure --runtime=docker
    systemctl restart docker
}
build_and_run_compose() {
    # Build images and (re)start both containers in detached mode.
    log "Building and starting Docker services"
    docker compose -f "$COMPOSE_FILE" up -d --build
}
install_systemd_units() {
    # Render unit templates (placeholder tokens -> real paths/user) into
    # /etc/systemd/system and (re)start everything.
    log "Installing systemd units"
    install -m 0755 "${PROJECT_ROOT}/deploy/systemd/precheck-sdr.sh" /usr/local/bin/dronedetector-precheck-sdr.sh
    local src dst
    for src in "${PROJECT_ROOT}"/deploy/systemd/*.service; do
        dst="${SYSTEMD_TARGET_DIR}/$(basename "$src")"
        sed \
            -e "s|__PROJECT_ROOT__|${PROJECT_ROOT}|g" \
            -e "s|__RUN_USER__|${RUN_USER}|g" \
            -e "s|__RUN_GROUP__|${RUN_GROUP}|g" \
            "$src" > "$dst"
    done
    systemctl daemon-reload
    systemctl enable dronedetector-compose.service
    systemctl restart dronedetector-compose.service
    for unit in "${SDR_UNITS[@]}"; do
        systemctl enable "$unit"
        systemctl restart "$unit"
    done
}
wait_for_systemd_active() {
    # Poll `systemctl is-active` once per second; args: unit [timeout=60s].
    local unit="$1"
    local timeout_seconds="${2:-60}"
    local i
    for ((i=0; i<timeout_seconds; i++)); do
        if systemctl is-active --quiet "$unit"; then
            return 0
        fi
        sleep 1
    done
    return 1
}
verify_installation() {
    # Fail the install unless every systemd unit and both compose services
    # reach the running state.
    log "Verifying services"
    wait_for_systemd_active dronedetector-compose.service 30 || die "dronedetector-compose.service is not active"
    for unit in "${SDR_UNITS[@]}"; do
        wait_for_systemd_active "$unit" 45 || die "$unit is not active"
    done
    docker compose -f "$COMPOSE_FILE" ps
    local running_services
    running_services="$(docker compose -f "$COMPOSE_FILE" ps --status running --services || true)"
    # grep -Fxq: exact whole-line match against the running-service list.
    printf '%s\n' "$running_services" | grep -Fxq "dronedetector-server-to-master" || die "server_to_master is not running"
    printf '%s\n' "$running_services" | grep -Fxq "dronedetector-nn-server" || die "NN_server is not running"
    log "Verification completed"
}
main() {
    # Full installation pipeline; ordering matters: docker must exist before
    # compose, images must exist before systemd units restart them.
    require_root "$@"
    log "Project root: ${PROJECT_ROOT}"
    log "Runtime user: ${RUN_USER}:${RUN_GROUP}"
    preflight
    install_host_non_python_deps
    setup_sdr_python_env
    install_docker_if_needed
    install_nvidia_container_toolkit
    build_and_run_compose
    install_systemd_units
    verify_installation
    log "SUCCESS: DroneDetector installation completed"
}
main "$@"

@ -0,0 +1,114 @@
from common.runtime import load_root_env, validate_env, as_float, as_int, as_str
import numpy as np
import requests
import os
import sys
import json
import time
load_root_env(__file__)
# Fail fast if the required configuration is missing or malformed.
validate_env("orange_scripts/compose_send_data_1200.py", {
    "POROG_1200": as_float,
    "SERVER_IP_2": as_str,
    "SERVER_PORT_2": as_int,
})
# Detection threshold for the 1.2 GHz band.
porog = float(os.getenv('POROG_1200'))
server_ip_1 = os.getenv('SERVER_IP_1')  # NOTE(review): unused in this script - kept for parity with the 915 variant?
server_port_1 = os.getenv('SERVER_PORT_1')
server_ip_2 = os.getenv('SERVER_IP_2')  # destination for the 1200 MHz packets
server_port_2 = os.getenv('SERVER_PORT_2')
# split_size: samples per packet shipped to the server;
# point_amount: samples used for the threshold (median) check;
# show_amount: index cutoff - only the top 20% of magnitudes are examined.
PARAMS = {'split_size': 400_000, 'point_amount': 100_000}
PARAMS['show_amount'] = 0.8 * PARAMS['point_amount']
token = 0    # packet counter, incremented on successful POST
channel = 1  # current channel index within the sweep
flag = 0     # 0 - nothing above threshold, 1 - collecting a full packet
##############################
# HYPERPARAMETERS
##############################
f_base = 1.1e9  # sweep start frequency, Hz
f_step = 20e6   # channel step, Hz
f_roof = 1.3e9  # sweep end frequency, Hz
##############################
# Variables
##############################
f = f_base       # current center frequency
EOCF = 0         # end-of-cycle flag returned to the flowgraph
signal_arr = []  # accumulator for incoming samples
class NumpyArrayEncoder(json.JSONEncoder):
    """JSON encoder that serializes NumPy scalars and arrays."""
    def default(self, obj):
        # NumPy arrays become nested lists, NumPy integers become native
        # ints; anything else is deferred to the base implementation
        # (which raises TypeError for unsupported objects).
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, np.integer):
            return int(obj)
        return super().default(obj)
def send_data(sig):
    """POST one packet of IQ samples (1200 MHz band) to the processing server.

    Splits the complex samples into float32 real/imag arrays and sends them
    as JSON; increments the module-level ``token`` on HTTP 200.
    """
    try:
        global token
        print('#' * 10)
        print('\nОтправка пакета ' + str(token+1))
        iq = np.array(sig, dtype=np.complex64)
        payload = {
            "freq": 1200,
            "channel": int(channel),
            "token": int(token + 1),
            "data_real": np.asarray(iq.real, dtype=np.float32),
            "data_imag": np.asarray(iq.imag, dtype=np.float32),
        }
        body = json.dumps(payload, cls=NumpyArrayEncoder)
        # NOTE(review): passing the pre-serialized string via `json=`
        # double-encodes the payload; the receiver presumably decodes twice -
        # confirm against the server before changing.
        url = "http://{0}:{1}/receive_data".format(server_ip_2, server_port_2)
        response = requests.post(url, json=body)
        if response.status_code == 200:
            token += 1
            print(response.text)
            print('#' * 10)
        else:
            print("Ошибка при отправке данных: ", response.status_code)
            print('#' * 10)
    except Exception as exc:
        print(str(exc))
def median(sig):
    """Update the global detection flag from the strongest sample magnitudes.

    Sorts the magnitudes of *sig*, takes the top 20% (indices beyond
    ``PARAMS['show_amount']``), and compares their median with the ``porog``
    threshold: ``flag`` becomes 1 when the level reaches the threshold.

    Fixes: uses ``np.sort`` (C-level, stays a NumPy array) instead of
    Python-level ``sorted()`` which built a list of np.float32 objects, and
    no longer shadows the function's own name with a local variable.

    :param sig: iterable of complex samples.
    """
    global flag
    magnitudes = np.sort(np.asarray(np.abs(np.array(sig, dtype=np.complex64)), dtype=np.float32))
    level = abs(float(np.median(magnitudes[int(PARAMS['show_amount']):])))
    flag = 0 if porog > level else 1
    print(channel, level, flag)
def work(lvl):
    """Advance the frequency-sweep state machine with a new chunk of samples.

    Called repeatedly from the GNU Radio probe thread. Accumulates samples
    into the module-level ``signal_arr``; while ``flag`` is 0 it runs a quick
    threshold check per channel and steps onward, and once ``flag`` is 1 it
    keeps collecting until a full packet is sent to the server.

    :param lvl: latest probe reading (scalar or array-like of samples).
    :return: tuple ``(f, EOCF)`` - next center frequency and end-of-cycle flag.
    """
    global flag
    global channel
    global f_base
    global f_step
    global f_roof
    global f
    global EOCF
    global signal_arr
    y = np.array(lvl).ravel()
    signal_arr = np.concatenate((signal_arr, y), axis=None)
    if f >= f_roof:
        # Completed a full sweep: wrap back to the base frequency and
        # restart channel numbering.
        f = f_base
        signal_arr = []
        channel = 1
        return f, EOCF
    else:
        if flag == 0 and len(signal_arr) >= PARAMS['point_amount']:
            # Enough samples for a quick threshold check on this channel.
            median(signal_arr[:PARAMS['point_amount']])
            signal_arr = []
        if flag == 0:
            # Nothing detected: move on to the next channel.
            f += f_step
            channel += 1
        if len(signal_arr) >= PARAMS['split_size']:
            # Threshold was exceeded (flag == 1) and a full packet has been
            # collected - ship it, then advance the channel.
            send_data(signal_arr[:PARAMS['split_size']])
            flag = 0
            signal_arr = []
            channel += 1
            f += f_step
    return f, EOCF

@ -0,0 +1,115 @@
from common.runtime import load_root_env, validate_env, as_float, as_int, as_str
import numpy as np
import requests
import os
import sys
import json
import time
load_root_env(__file__)
# Fail fast if the required configuration is missing or malformed.
validate_env("orange_scripts/compose_send_data_2400.py", {
    "POROG_2400": as_float,
    "SERVER_IP_2": as_str,
    "SERVER_PORT_2": as_int,
})
# Detection threshold for the 2.4 GHz band.
porog = float(os.getenv('POROG_2400'))
server_ip_1 = os.getenv('SERVER_IP_1')  # NOTE(review): unused in this script - kept for parity with the 915 variant?
server_port_1 = os.getenv('SERVER_PORT_1')
server_ip_2 = os.getenv('SERVER_IP_2')  # destination for the 2400 MHz packets
server_port_2 = os.getenv('SERVER_PORT_2')
# split_size: samples per packet shipped to the server;
# point_amount: samples used for the threshold (median) check;
# show_amount: index cutoff - only the top 20% of magnitudes are examined.
PARAMS = {'split_size': 400_000, 'point_amount': 100_000}
PARAMS['show_amount'] = 0.8 * PARAMS['point_amount']
token = 0    # packet counter, incremented on successful POST
channel = 1  # current channel index within the sweep
flag = 0     # 0 - nothing above threshold, 1 - collecting a full packet
##############################
# HYPERPARAMETERS
##############################
f_base = 2.4e9  # sweep start frequency, Hz
f_step = 20e6   # channel step, Hz
f_roof = 2.5e9  # sweep end frequency, Hz
##############################
# Variables
##############################
f = f_base       # current center frequency
EOCF = 0         # end-of-cycle flag returned to the flowgraph
signal_arr = []  # accumulator for incoming samples
class NumpyArrayEncoder(json.JSONEncoder):
    """JSON encoder that serializes NumPy scalars and arrays."""
    def default(self, obj):
        # NumPy arrays become nested lists, NumPy integers become native
        # ints; anything else is deferred to the base implementation
        # (which raises TypeError for unsupported objects).
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, np.integer):
            return int(obj)
        return super().default(obj)
def send_data(sig):
    """POST one packet of IQ samples (2.4 GHz band) to the processing server.

    Splits the complex samples into float32 real/imag arrays and sends them
    as JSON; increments the module-level ``token`` on HTTP 200.
    """
    try:
        global token
        print('#' * 10)
        print('\nОтправка пакета ' + str(token+1))
        iq = np.array(sig, dtype=np.complex64)
        payload = {
            "freq": 2400,
            "channel": int(channel),
            "token": int(token + 1),
            "data_real": np.asarray(iq.real, dtype=np.float32),
            "data_imag": np.asarray(iq.imag, dtype=np.float32),
        }
        body = json.dumps(payload, cls=NumpyArrayEncoder)
        # NOTE(review): passing the pre-serialized string via `json=`
        # double-encodes the payload; the receiver presumably decodes twice -
        # confirm against the server before changing.
        url = "http://{0}:{1}/receive_data".format(server_ip_2, server_port_2)
        response = requests.post(url, json=body)
        if response.status_code == 200:
            token += 1
            print(response.text)
            print('#' * 10)
        else:
            print("Ошибка при отправке данных: ", response.status_code)
            print('#' * 10)
    except Exception as exc:
        print(str(exc))
def median(sig):
    """Update the global detection flag from the strongest sample magnitudes.

    Sorts the magnitudes of *sig*, takes the top 20% (indices beyond
    ``PARAMS['show_amount']``), and compares their median with the ``porog``
    threshold: ``flag`` becomes 1 when the level reaches the threshold.

    Fixes: uses ``np.sort`` (C-level, stays a NumPy array) instead of
    Python-level ``sorted()`` which built a list of np.float32 objects, and
    no longer shadows the function's own name with a local variable.

    :param sig: iterable of complex samples.
    """
    global flag
    magnitudes = np.sort(np.asarray(np.abs(np.array(sig, dtype=np.complex64)), dtype=np.float32))
    level = abs(float(np.median(magnitudes[int(PARAMS['show_amount']):])))
    flag = 0 if porog > level else 1
    print(channel, level, flag)
def work(lvl):
    """Advance the frequency-sweep state machine with a new chunk of samples.

    Called repeatedly from the GNU Radio probe thread. Accumulates samples
    into the module-level ``signal_arr``; while ``flag`` is 0 it runs a quick
    threshold check per channel and steps onward, and once ``flag`` is 1 it
    keeps collecting until a full packet is sent to the server.

    :param lvl: latest probe reading (scalar or array-like of samples).
    :return: tuple ``(f, EOCF)`` - next center frequency and end-of-cycle flag.
    """
    global flag
    global channel
    global f_base
    global f_step
    global f_roof
    global f
    global EOCF
    global signal_arr
    y = np.array(lvl).ravel()
    signal_arr = np.concatenate((signal_arr, y), axis=None)
    if f >= f_roof:
        # Completed a full sweep: wrap back to the base frequency and
        # restart channel numbering.
        f = f_base
        signal_arr = []
        channel = 1
        return f, EOCF
    else:
        if flag == 0 and len(signal_arr) >= PARAMS['point_amount']:
            # Enough samples for a quick threshold check on this channel.
            median(signal_arr[:PARAMS['point_amount']])
            signal_arr = []
        if flag == 0:
            # Nothing detected: move on to the next channel.
            f += f_step
            channel += 1
        if len(signal_arr) >= PARAMS['split_size']:
            # Threshold was exceeded (flag == 1) and a full packet has been
            # collected - ship it, then advance the channel.
            send_data(signal_arr[:PARAMS['split_size']])
            flag = 0
            signal_arr = []
            channel += 1
            f += f_step
    return f, EOCF

@ -0,0 +1,115 @@
from common.runtime import load_root_env, validate_env, as_float, as_int, as_str
import numpy as np
import requests
import os
import sys
import json
import time
load_root_env(__file__)
# Fail fast if the required configuration is missing or malformed.
validate_env("orange_scripts/compose_send_data_915.py", {
    "POROG_915": as_float,
    "SERVER_IP_1": as_str,
    "SERVER_PORT_1": as_int,
})
# Detection threshold for the 915 MHz band.
porog = float(os.getenv('POROG_915'))
server_ip_1 = os.getenv('SERVER_IP_1')  # destination for the 915 MHz packets
server_port_1 = os.getenv('SERVER_PORT_1')
server_ip_2 = os.getenv('SERVER_IP_2')  # NOTE(review): unused in this script - kept for parity with the other variants?
server_port_2 = os.getenv('SERVER_PORT_2')
# split_size: samples per packet shipped to the server;
# point_amount: samples used for the threshold (median) check;
# show_amount: index cutoff - only the top 20% of magnitudes are examined.
PARAMS = {'split_size': 400_000, 'point_amount': 100_000}
PARAMS['show_amount'] = 0.8 * PARAMS['point_amount']
token = 0    # packet counter, incremented on successful POST
channel = 1  # current channel index within the sweep
flag = 0     # 0 - nothing above threshold, 1 - collecting a full packet
##############################
# HYPERPARAMETERS
##############################
f_base = 0.91e9  # sweep start frequency, Hz
f_step = 20e6    # channel step, Hz
f_roof = 0.98e9  # sweep end frequency, Hz
##############################
# Variables
##############################
f = f_base       # current center frequency
EOCF = 0         # end-of-cycle flag returned to the flowgraph
signal_arr = []  # accumulator for incoming samples
class NumpyArrayEncoder(json.JSONEncoder):
    """JSON encoder that serializes NumPy scalars and arrays."""
    def default(self, obj):
        # NumPy arrays become nested lists, NumPy integers become native
        # ints; anything else is deferred to the base implementation
        # (which raises TypeError for unsupported objects).
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, np.integer):
            return int(obj)
        return super().default(obj)
def send_data(sig):
    """POST one packet of IQ samples (915 MHz band) to the processing server.

    Splits the complex samples into float32 real/imag arrays and sends them
    as JSON; increments the module-level ``token`` on HTTP 200.
    """
    try:
        global token
        print('#' * 10)
        print('\nОтправка пакета ' + str(token+1))
        iq = np.array(sig, dtype=np.complex64)
        payload = {
            "freq": 915,
            "channel": int(channel),
            "token": int(token + 1),
            "data_real": np.asarray(iq.real, dtype=np.float32),
            "data_imag": np.asarray(iq.imag, dtype=np.float32),
        }
        body = json.dumps(payload, cls=NumpyArrayEncoder)
        # NOTE(review): passing the pre-serialized string via `json=`
        # double-encodes the payload; the receiver presumably decodes twice -
        # confirm against the server before changing.
        url = "http://{0}:{1}/receive_data".format(server_ip_1, server_port_1)
        response = requests.post(url, json=body)
        if response.status_code == 200:
            token += 1
            print(response.text)
            print('#' * 10)
        else:
            print("Ошибка при отправке данных: ", response.status_code)
            print('#' * 10)
    except Exception as exc:
        print(str(exc))
def median(sig):
    """Update the global detection flag from the strongest sample magnitudes.

    Sorts the magnitudes of *sig*, takes the top 20% (indices beyond
    ``PARAMS['show_amount']``), and compares their median with the ``porog``
    threshold: ``flag`` becomes 1 when the level reaches the threshold.

    Fixes: uses ``np.sort`` (C-level, stays a NumPy array) instead of
    Python-level ``sorted()`` which built a list of np.float32 objects, and
    no longer shadows the function's own name with a local variable.

    :param sig: iterable of complex samples.
    """
    global flag
    magnitudes = np.sort(np.asarray(np.abs(np.array(sig, dtype=np.complex64)), dtype=np.float32))
    level = abs(float(np.median(magnitudes[int(PARAMS['show_amount']):])))
    flag = 0 if porog > level else 1
    print(channel, level, flag)
def work(lvl):
    """Advance the frequency-sweep state machine with a new chunk of samples.

    Called repeatedly from the GNU Radio probe thread. Accumulates samples
    into the module-level ``signal_arr``; while ``flag`` is 0 it runs a quick
    threshold check per channel and steps onward, and once ``flag`` is 1 it
    keeps collecting until a full packet is sent to the server.

    :param lvl: latest probe reading (scalar or array-like of samples).
    :return: tuple ``(f, EOCF)`` - next center frequency and end-of-cycle flag.
    """
    global flag
    global channel
    global f_base
    global f_step
    global f_roof
    global f
    global EOCF
    global signal_arr
    y = np.array(lvl).ravel()
    signal_arr = np.concatenate((signal_arr, y), axis=None)
    if f >= f_roof:
        # Completed a full sweep: wrap back to the base frequency and
        # restart channel numbering.
        f = f_base
        signal_arr = []
        channel = 1
        return f, EOCF
    else:
        if flag == 0 and len(signal_arr) >= PARAMS['point_amount']:
            # Enough samples for a quick threshold check on this channel.
            median(signal_arr[:PARAMS['point_amount']])
            signal_arr = []
        if flag == 0:
            # Nothing detected: move on to the next channel.
            f += f_step
            channel += 1
        if len(signal_arr) >= PARAMS['split_size']:
            # Threshold was exceeded (flag == 1) and a full packet has been
            # collected - ship it, then advance the channel.
            send_data(signal_arr[:PARAMS['split_size']])
            flag = 0
            signal_arr = []
            channel += 1
            f += f_step
    return f, EOCF

@ -0,0 +1,173 @@
from gnuradio import blocks, gr
import sys
import signal
import compose_send_data_1200 as my_freq
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
load_root_env(__file__)
def get_hack_id():
    """Return the osmosdr device index for the HackRF whose serial is HACKID_1200.

    Delegates to the shared ``resolve_hackrf_index`` helper. Everything that
    previously followed this return statement was unreachable dead code (a
    legacy lsusb-parsing implementation) and has been removed; that path also
    shadowed the ``id`` builtin and raised NameError when no serial matched.
    """
    return resolve_hackrf_index('HACKID_1200', 'orange_scripts/main_1200.py')
class get_center_freq(gr.top_block):
    """GNU Radio top block that sweeps a HackRF across a frequency band.

    Samples from the SDR are grouped into 4096-wide vectors and fed into a
    signal probe. A daemon thread polls the probe and passes each reading to
    ``my_freq.work``, whose return value becomes the next tuning frequency -
    the sweep logic itself lives in the compose_send_data module.
    """
    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")
        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000
        self.num_points = num_points = 8192
        self.flag = flag = 1
        self.decimation = decimation = 1
        # Initial center frequency comes from the sweep state machine.
        self.center_freq = center_freq = my_freq.work(prob_freq)[0]
        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # NOTE(review): the attribute is named rtlsdr_source_0 but the device
        # is a HackRF selected by serial via get_hack_id().
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        self.rtlsdr_source_0.set_gain(16, 0)
        self.rtlsdr_source_0.set_if_gain(16, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)
        def _prob_freq_probe():
            # Poll the probe ~poll_rate times/s and retune via set_prob_freq.
            # AttributeError is swallowed because the thread may fire while
            # the object is still only partially constructed.
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)
        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))
    def get_prob_freq(self):
        return self.prob_freq
    def set_prob_freq(self, prob_freq):
        # Each new probe reading drives the sweep one step forward.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq)[0])
    def get_top_peaks_amount(self):
        return self.top_peaks_amount
    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount
    def get_samp_rate(self):
        return self.samp_rate
    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)
    def get_poll_rate(self):
        return self.poll_rate
    def set_poll_rate(self, poll_rate):
        # NOTE(review): the poll thread captured poll_rate at construction,
        # so changing this attribute does not affect the polling interval.
        self.poll_rate = poll_rate
    def get_num_points(self):
        return self.num_points
    def set_num_points(self, num_points):
        self.num_points = num_points
    def get_flag(self):
        return self.flag
    def set_flag(self, flag):
        self.flag = flag
    def get_decimation(self):
        return self.decimation
    def set_decimation(self, decimation):
        self.decimation = decimation
    def get_center_freq(self):
        return self.center_freq
    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Build the flowgraph, run it, and shut down cleanly on SIGINT/SIGTERM."""
    # Give the SDR a moment to settle before opening the device.
    time.sleep(3)
    flowgraph = top_block_cls()

    def _shutdown(sig=None, frame=None):
        # Stop the flowgraph and exit on Ctrl-C or a service stop.
        flowgraph.stop()
        flowgraph.wait()
        sys.exit(0)

    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, _shutdown)
    flowgraph.start()
    try:
        input('Press Enter to quit: ')
    except EOFError:
        # No attached stdin (e.g. running under systemd): just block on wait().
        pass
    flowgraph.wait()


if __name__ == '__main__':
    main()

@ -0,0 +1,173 @@
from gnuradio import blocks, gr
import sys
import signal
import compose_send_data_2400 as my_freq
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
load_root_env(__file__)
def get_hack_id():
    """Return the osmosdr device index for the HackRF whose serial is HACKID_2400.

    Delegates to the shared ``resolve_hackrf_index`` helper. Everything that
    previously followed this return statement was unreachable dead code (a
    legacy lsusb-parsing implementation) and has been removed; that path also
    shadowed the ``id`` builtin and raised NameError when no serial matched.
    """
    return resolve_hackrf_index('HACKID_2400', 'orange_scripts/main_2400.py')
class get_center_freq(gr.top_block):
    """GNU Radio top block that sweeps a HackRF across a frequency band.

    Samples from the SDR are grouped into 4096-wide vectors and fed into a
    signal probe. A daemon thread polls the probe and passes each reading to
    ``my_freq.work``, whose return value becomes the next tuning frequency -
    the sweep logic itself lives in the compose_send_data module.
    """
    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")
        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000
        self.num_points = num_points = 8192
        self.flag = flag = 1
        self.decimation = decimation = 1
        # Initial center frequency comes from the sweep state machine.
        self.center_freq = center_freq = my_freq.work(prob_freq)[0]
        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # NOTE(review): the attribute is named rtlsdr_source_0 but the device
        # is a HackRF selected by serial via get_hack_id().
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        self.rtlsdr_source_0.set_gain(16, 0)
        self.rtlsdr_source_0.set_if_gain(16, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)
        def _prob_freq_probe():
            # Poll the probe ~poll_rate times/s and retune via set_prob_freq.
            # AttributeError is swallowed because the thread may fire while
            # the object is still only partially constructed.
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)
        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))
    def get_prob_freq(self):
        return self.prob_freq
    def set_prob_freq(self, prob_freq):
        # Each new probe reading drives the sweep one step forward.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq)[0])
    def get_top_peaks_amount(self):
        return self.top_peaks_amount
    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount
    def get_samp_rate(self):
        return self.samp_rate
    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)
    def get_poll_rate(self):
        return self.poll_rate
    def set_poll_rate(self, poll_rate):
        # NOTE(review): the poll thread captured poll_rate at construction,
        # so changing this attribute does not affect the polling interval.
        self.poll_rate = poll_rate
    def get_num_points(self):
        return self.num_points
    def set_num_points(self, num_points):
        self.num_points = num_points
    def get_flag(self):
        return self.flag
    def set_flag(self, flag):
        self.flag = flag
    def get_decimation(self):
        return self.decimation
    def set_decimation(self, decimation):
        self.decimation = decimation
    def get_center_freq(self):
        return self.center_freq
    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Build the flowgraph, run it, and shut down cleanly on SIGINT/SIGTERM."""
    # Give the SDR a moment to settle before opening the device.
    time.sleep(3)
    flowgraph = top_block_cls()

    def _shutdown(sig=None, frame=None):
        # Stop the flowgraph and exit on Ctrl-C or a service stop.
        flowgraph.stop()
        flowgraph.wait()
        sys.exit(0)

    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, _shutdown)
    flowgraph.start()
    try:
        input('Press Enter to quit: ')
    except EOFError:
        # No attached stdin (e.g. running under systemd): just block on wait().
        pass
    flowgraph.wait()


if __name__ == '__main__':
    main()

@ -0,0 +1,173 @@
from gnuradio import blocks, gr
import sys
import signal
import compose_send_data_915 as my_freq
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
load_root_env(__file__)
def get_hack_id():
    """Return the osmosdr device index for the HackRF whose serial is HACKID_915.

    Delegates to the shared ``resolve_hackrf_index`` helper. Everything that
    previously followed this return statement was unreachable dead code (a
    legacy lsusb-parsing implementation) and has been removed; that path also
    shadowed the ``id`` builtin and raised NameError when no serial matched.
    """
    return resolve_hackrf_index('HACKID_915', 'orange_scripts/main_915.py')
class get_center_freq(gr.top_block):
    """GNU Radio top block that sweeps a HackRF across a frequency band.

    Samples from the SDR are grouped into 4096-wide vectors and fed into a
    signal probe. A daemon thread polls the probe and passes each reading to
    ``my_freq.work``, whose return value becomes the next tuning frequency -
    the sweep logic itself lives in the compose_send_data module.
    """
    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")
        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000
        self.num_points = num_points = 8192
        self.flag = flag = 1
        self.decimation = decimation = 1
        # Initial center frequency comes from the sweep state machine.
        self.center_freq = center_freq = my_freq.work(prob_freq)[0]
        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # NOTE(review): the attribute is named rtlsdr_source_0 but the device
        # is a HackRF selected by serial via get_hack_id().
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        self.rtlsdr_source_0.set_gain(16, 0)
        self.rtlsdr_source_0.set_if_gain(16, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)
        def _prob_freq_probe():
            # Poll the probe ~poll_rate times/s and retune via set_prob_freq.
            # AttributeError is swallowed because the thread may fire while
            # the object is still only partially constructed.
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)
        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))
    def get_prob_freq(self):
        return self.prob_freq
    def set_prob_freq(self, prob_freq):
        # Each new probe reading drives the sweep one step forward.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq)[0])
    def get_top_peaks_amount(self):
        return self.top_peaks_amount
    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount
    def get_samp_rate(self):
        return self.samp_rate
    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)
    def get_poll_rate(self):
        return self.poll_rate
    def set_poll_rate(self, poll_rate):
        # NOTE(review): the poll thread captured poll_rate at construction,
        # so changing this attribute does not affect the polling interval.
        self.poll_rate = poll_rate
    def get_num_points(self):
        return self.num_points
    def set_num_points(self, num_points):
        self.num_points = num_points
    def get_flag(self):
        return self.flag
    def set_flag(self, flag):
        self.flag = flag
    def get_decimation(self):
        return self.decimation
    def set_decimation(self, decimation):
        self.decimation = decimation
    def get_center_freq(self):
        return self.center_freq
    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Build the flowgraph, run it, and shut down cleanly on SIGINT/SIGTERM."""
    # Give the SDR a moment to settle before opening the device.
    time.sleep(3)
    flowgraph = top_block_cls()

    def _shutdown(sig=None, frame=None):
        # Stop the flowgraph and exit on Ctrl-C or a service stop.
        flowgraph.stop()
        flowgraph.wait()
        sys.exit(0)

    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, _shutdown)
    flowgraph.start()
    try:
        input('Press Enter to quit: ')
    except EOFError:
        # No attached stdin (e.g. running under systemd): just block on wait().
        pass
    flowgraph.wait()


if __name__ == '__main__':
    main()

@ -0,0 +1,163 @@
import statistics
# NOTE: an improved version of this code exists in FRScanner
class DataBuffer:
    """Circular buffer used to estimate the per-channel noise floor.

    Columns are refreshed on a thinning (exponential back-off) schedule: at
    first every reading updates a column, then the interval between updates
    doubles until it reaches ``num_of_thinning_iter``.

    Attributes:
        current_column: index of the buffer column to update next.
        thinning_counter: current thinning multiplier (doubles over time).
        current_counter: readings seen since the last column update.
        num_of_thinning_iter: upper bound for the thinning multiplier.
        line_size: number of rows == number of channels.
        columns_size: fixed number of columns.
        multiply_factor: relative threshold over the noise median, e.g.
            1.1 means "trigger when the signal exceeds the noise by 10%".
        num_for_alarm: consecutive exceedances required to trigger.
        is_init: True once the buffer has been fully filled (calibrated).
        buffer: the sample matrix (line_size x columns_size).
        buffer_medians: per-channel medians of the buffer rows.
        buffer_alarms: per-channel counters of consecutive exceedances.
    """
    def __init__(self, columns_size, num_of_thinning_iter, num_of_channels, multiply_factor, num_for_alarm):
        """Create an empty (uncalibrated) buffer.

        :param columns_size: number of columns per channel row.
        :param num_of_thinning_iter: maximum thinning multiplier.
        :param num_of_channels: number of channels (rows).
        :param multiply_factor: relative alarm threshold (e.g. 1.1).
        :param num_for_alarm: consecutive exceedances needed to trigger.
        """
        self.current_column = 0
        self.thinning_counter = 1
        self.current_counter = 1
        self.num_of_thinning_iter = num_of_thinning_iter
        self.line_size = num_of_channels
        self.columns_size = columns_size
        self.multiply_factor = multiply_factor
        self.num_for_alarm = num_for_alarm
        self.is_init = False
        self.buffer = [[0 for _ in range(self.columns_size)] for _ in range(self.line_size)]
        self.buffer_medians = [0] * self.line_size
        self.buffer_alarms = [0] * self.line_size
    def get_buffer(self):
        """Return the raw buffer matrix."""
        return self.buffer
    def get_medians(self):
        """Return the per-channel medians computed by :meth:`medians`."""
        return self.buffer_medians
    def get_alarms(self):
        """Return the per-channel consecutive-alarm counters."""
        return self.buffer_alarms
    def check_init(self):
        """Return True once the initial calibration pass has completed."""
        return self.is_init
    def print(self):
        """Dump the buffer contents to stdout (debug helper)."""
        print('buffer is: ')
        for i in range(self.line_size):
            print(self.buffer[i], end=' ')
        print()
    def medians(self):
        """Recompute the per-channel medians (no-op until calibrated).

        :return: None
        """
        if self.check_init():
            for i in range(self.line_size):
                self.buffer_medians[i] = statistics.median(self.buffer[i])
    def alarms_fill_zeros(self):
        """Reset all consecutive-alarm counters to zero."""
        self.buffer_alarms = [0] * self.line_size
    def update(self, data):
        """Feed one reading per channel into the buffer.

        When the current reading number matches the thinning multiplier:
        1. Write the reading into the current column.
        2. Advance the column cursor (wrapping to the start).
        3. Recompute medians if already calibrated.
        4. Reset the reading counter.
        5. On wrap-around: the first completed pass marks calibration done;
           the thinning multiplier then doubles until it reaches its cap.
        Otherwise just count the reading.

        :param data: per-channel signal metrics (one value per row).
        :return: None
        """
        # TODO: Add a relaxation time - once the system triggers, stop
        # updating the buffer for N readings (N from .env-template). Known
        # issue: the buffer only stops updating when the system actually
        # triggers; while a strong signal exceeds the threshold only
        # intermittently (fewer than num_for_alarm consecutive hits) the
        # buffer keeps absorbing alarm-level samples, raising the thresholds.
        # Example: a drone hovering at 1 km is seen, but alarms fire only
        # intermittently; the medians creep up, so when the drone approaches
        # it is detected at a shorter range than usual.
        if self.current_counter == self.thinning_counter:
            for i in range(self.line_size):
                self.buffer[i][self.current_column] = data[i]
            self.current_column = (self.current_column + 1) % self.columns_size
            self.medians()
            self.current_counter = 1
            if self.current_column == 0:
                if self.thinning_counter == 1:
                    # First full pass over the columns: calibration is done.
                    self.is_init = True
                    self.medians()
                    print('Начальная калибровка завершена.')
                if self.thinning_counter < self.num_of_thinning_iter:
                    self.thinning_counter *= 2
        else:
            self.current_counter += 1
    def check_alarm(self, data):
        """Check whether the system should trigger on the given reading.

        A channel value above ``multiply_factor * median`` increments that
        channel's alarm counter; a non-exceeding value resets it.
        ``num_for_alarm`` consecutive exceedances on any channel triggers
        (and resets all counters). No-op until calibrated.

        :param data: per-channel signal metrics.
        :return: True when triggered, False otherwise.
        """
        if self.check_init():
            for i in range(len(data)):
                exceeding = data[i] > self.multiply_factor * self.buffer_medians[i]
                # NOTE(review): divides by the channel median - raises
                # ZeroDivisionError if a median is 0; confirm inputs.
                print(data[i]/self.buffer_medians[i])
                if exceeding:
                    self.buffer_alarms[i] += 1
                else:
                    self.buffer_alarms[i] = 0
                if self.buffer_alarms[i] >= self.num_for_alarm:
                    self.buffer_alarms = [0] * self.line_size
                    return True
        return False
    def check_single_alarm(self, median, cur_channel):
        """Check whether a single channel median exceeds its threshold.

        :param median: the metric (characteristic) for the channel.
        :param cur_channel: channel index within the frequency band.
        :return: True/False.
        """
        if not self.check_init():
            # Fix: previously fell through and returned None implicitly;
            # return an explicit False (backward compatible - both falsy).
            return False
        exceeding = median > self.multiply_factor * self.buffer_medians[cur_channel]
        print(median/self.buffer_medians[cur_channel])
        if exceeding:
            return True
        return False

@ -0,0 +1,168 @@
import os
from core.data_buffer import DataBuffer
def get_centre_freq(freq):
    """Map a frequency in Hz to the name of the band containing it.

    :param freq: frequency being processed, Hz.
    :return: band name as a string, or ``'0'`` when no band matches.
    """
    # (low, high, name) bounds are inclusive; the ranges do not overlap,
    # so first match is the only match.
    bands = (
        (5.46e9, 6.0e9, 5800),
        (5.0e9, 5.4e9, 5200),
        (4.5e9, 4.7e9, 4500),
        (3.3e9, 3.5e9, 3300),
        (2.4e9, 2.5e9, 2400),
        (1e9, 1.36e9, 1200),
        (0.9e9, 0.960e9, 915),
        (0.830e9, 0.890e9, 868),
        (0.700e9, 0.780e9, 750),
        (0.380e9, 0.500e9, 433),
    )
    for low, high, name in bands:
        if low <= freq <= high:
            return str(name)
    return str(0)
class MultiChannel:
    """Channel switcher with support for several frequency bands.

    Attributes:
        steps: per-band channel steps, e.g. ``[-20e6, -5e6, -3e6]`` - the
            i-th element matches the i-th processed band (e.g. 1.2 GHz,
            915 MHz, 868 MHz).
        bases: upper bounds of the band ranges, e.g. ``[1.36e9, 0.93e9, 0.87e9]``.
        roofs: lower bounds of the band ranges, e.g. ``[1e9, 0.9e9, 0.85e9]``.
        cur_channel: the channel currently being processed.
        cur_roof: lower bound of the band currently being processed.
        cur_step: step of the band currently being processed.
        num_chs: per-band channel counts, computed lazily from bounds and step.
        init_freq: True once the starting frequency has been handed out;
            needed because of how GNURadio graphs call ``work`` in embedded
            Python blocks.
        DB: list of circular buffers, one per band.
    """
    def __init__(self, steps, bases, roofs):
        """Initialize the switcher.

        :param steps: per-band channel steps (negative: sweeping downward).
        :param bases: per-band upper bounds.
        :param roofs: per-band lower bounds.
        """
        self.steps = steps
        self.bases = bases
        self.roofs = roofs
        self.cur_channel = self.bases[0]
        self.cur_roof = self.roofs[0]
        self.cur_step = self.steps[0]
        self.num_chs = []
        self.init_freq = False
        self.DB = []
    def init_f(self):
        """Hand out the very first frequency to process.

        :return: upper bound of the first band.
        """
        self.init_freq = True
        return self.bases[0]
    def get_cur_channel(self):
        """Return the channel currently being processed.

        :return: the current channel.
        """
        return self.cur_channel
    def change_channel(self):
        """Step to the next channel.

        Sweeps from a band's upper bound down to its lower bound in steps of
        ``cur_step``. On reaching the lower bound, switches to the next band;
        after the last band (or with a single band) it wraps back to the
        first band's upper bound.

        :return: the channel after the switch.
        """
        if not self.init_freq:
            return self.init_f()
        if self.cur_channel <= self.cur_roof:
            if self.cur_roof == self.roofs[-1]:
                # Finished the last band: wrap around to the first one.
                self.cur_channel = self.bases[0]
                self.cur_roof = self.roofs[0]
                self.cur_step = self.steps[0]
            else:
                next_roofs = self.roofs.index(self.cur_roof) + 1
                self.cur_channel = self.bases[next_roofs]
                self.cur_roof = self.roofs[next_roofs]
                self.cur_step = self.steps[next_roofs]
        else:
            self.cur_channel += self.cur_step
        return self.get_cur_channel()
    def get_num_chs(self, idx_freq):
        """Return the number of channels of band *idx_freq* (computed lazily).

        :param idx_freq: band index within this instance, i.e. the position
            of the band in ``bases``/``roofs``/``steps``. In the attribute
            examples the 915 MHz band has index 1 (second list element).
        :return: channel count.
        """
        if (idx_freq + 1) > len(self.num_chs):
            tmp = self.bases[idx_freq]
            counter = 0
            while tmp >= self.roofs[idx_freq]:
                counter += 1
                tmp += self.steps[idx_freq]
            self.num_chs.append(counter)
            return counter
        else:
            return self.num_chs[idx_freq]
    def check_f(self, freq):
        """Look up the band containing *freq*.

        :param freq: frequency in Hz.
        :return: ``(channel_count, circular_buffer)`` of the matching band,
            or ``(None, None)`` when no band contains *freq*.
        """
        # Fix: the previous implementation returned (None, None) from inside
        # the loop on the first non-matching band, so any frequency belonging
        # to a band after index 0 was never found.
        for i in range(len(self.bases)):
            if self.roofs[i] <= freq <= self.bases[i]:
                return self.get_num_chs(i), self.DB[i]
        return None, None
    def fill_DB(self):
        """Create one circular buffer per band from environment settings.

        :return: None.
        """
        for i in range(len(self.bases)):
            freq = get_centre_freq(self.bases[i])
            buffer_columns_size = int(os.getenv('buffer_columns_size_' + str(freq)))
            num_of_thinning_iter = int(os.getenv('num_of_thinning_iter_' + str(freq)))
            multiply_factor = float(os.getenv('multiply_factor_' + str(freq)))
            num_for_alarm = int(os.getenv('num_for_alarm_' + str(freq)))
            num_chs = self.get_num_chs(i)
            self.DB.append(
                DataBuffer(buffer_columns_size, num_of_thinning_iter, num_chs, multiply_factor, num_for_alarm))
    def db_alarms_zeros(self, circle_buffer):
        """Zero the alarm counters of every buffer except *circle_buffer*.

        Called when the system triggers; the current band's buffer has
        already reset its own counters.

        :param circle_buffer: buffer of the band currently being processed.
        :return: None.
        """
        for i in range(len(self.DB)):
            if self.DB[i] != circle_buffer:
                self.DB[i].alarms_fill_zeros()

@ -0,0 +1,107 @@
import os
import numpy as np
from typing import Union
from common.runtime import load_root_env
load_root_env(__file__)
def get_signal_length(freq):
    """Return the configured signal length for centre frequency *freq*.

    Reads the ``signal_length_<freq>`` environment variable.

    :param freq: Centre frequency used as the env-var suffix.
    :return: Signal length as an int.
    :raises KeyError: If the environment variable is not set (previously this
        surfaced as an opaque ``TypeError`` from ``int(None)``).
    :raises ValueError: If the variable is set but is not an integer.
    """
    raw = os.getenv(f'signal_length_{freq}')
    if raw is None:
        raise KeyError(f'environment variable signal_length_{freq} is not set')
    return int(raw)
class Signal:
    """Collects raw complex samples and reduces them to a scalar "characteristic".

    Attributes:
        conv_method: Reduction used in preprocessing: 'max' takes the maximum
            magnitude, anything else takes the median.
            NOTE(review): the default name 'average' actually yields the
            median branch - confirm the naming is intentional.
        signal: Accumulated samples (list while filling, ndarray afterwards).
        signal_abs: Element-wise magnitudes computed during preprocessing.
    """

    def __init__(self, conv_method='average'):
        self.conv_method = conv_method
        self.signal = []
        self.signal_abs = []

    def get_signal(self):
        """Return the collected signal and its magnitudes.

        :return: Tuple (signal, signal_abs).
        """
        return self.signal, self.signal_abs

    def clear(self) -> None:
        """Drop any accumulated samples and magnitudes.

        :return: None
        """
        self.signal = []
        self.signal_abs = []

    def signal_preprocessing(self, length) -> float:
        """Reduce the first *length* samples to a single float characteristic.

        :param length: Number of leading samples to use.
        :return: Max (conv_method == 'max') or median of the sample magnitudes.
        """
        # Stack real/imag parts into a (2, length) float32 matrix.
        stacked = np.array([self.signal.real[0:length], self.signal.imag[0:length]], dtype=np.float32)
        # Column-wise Euclidean norm == per-sample complex magnitude.
        magnitudes = np.linalg.norm(stacked, axis=0)
        if self.conv_method == 'max':
            characteristic = np.max(magnitudes)
        else:
            characteristic = np.median(magnitudes)
        self.signal = stacked
        self.signal_abs = magnitudes
        return characteristic

    def fill_signal(self, lvl, length) -> Union[int, float]:
        """Accumulate *lvl* into the buffer; preprocess once enough is collected.

        :param lvl: Array-like chunk of samples of arbitrary shape (flattened).
        :param length: Target number of samples.
        :return: 0 while still collecting, otherwise the signal characteristic.
        """
        if len(self.signal) > length:
            # Enough samples gathered - reduce them to the characteristic.
            return self.signal_preprocessing(length)
        flat = np.array(lvl).ravel()
        self.signal = np.concatenate((self.signal, flat), axis=None)
        return 0
class SignalsArray:
    """Accumulates per-channel signal characteristics (metrics) for one band.

    Attributes:
        sig_array: Metrics collected so far for the current sweep.
        counter: How many of the band's channels have been filled.
    """

    def __init__(self):
        self.sig_array = []
        self.counter = 0

    def fill_sig_arr(self, metrica, num_chs=3):
        """Append one metric; report the full array once all channels are in.

        :param metrica: Signal characteristic (metric) for the current channel.
        :param num_chs: Number of channels on the band (falsy -> no-op).
        :return: Tuple (channel index within the band, metrics array). The
            array is empty until all *num_chs* metrics have been collected.
        """
        if not num_chs:
            return 0, []
        if self.counter < num_chs:
            self.sig_array.append(metrica)
            self.counter += 1
        if self.counter != num_chs:
            # Sweep not complete yet - report the index just filled.
            return self.counter - 1, []
        # Sweep complete: hand the array over and reset for the next sweep.
        full_sweep = self.sig_array
        self.sig_array = []
        self.counter = 0
        return num_chs - 1, full_sweep

@ -0,0 +1,44 @@
class Spectrum:
    """Holds signal "characteristics" (preprocessed values), one per frequency.

    Attributes:
        freqs: Frequencies being scanned.
        spec_elems: Characteristic collected for each frequency.
    """

    def __init__(self, freqs: list):
        """Create an empty spectrum over *freqs*.

        :param freqs: List of frequencies.
        """
        self.freqs = freqs
        self.spec_elems = []

    def get_freqs(self) -> list:
        """Return the list of frequencies.

        :return: Frequency list.
        """
        return self.freqs

    def get_spectrum(self) -> list:
        """Return the collected spectrum.

        :return: List of signal characteristics.
        """
        return self.spec_elems

    def add(self, elem: float) -> None:
        """Append one characteristic to the spectrum.

        :param elem: Characteristic value to add.
        :return: None.
        """
        self.spec_elems.append(elem)

    def clear(self) -> None:
        """Drop every collected characteristic.

        :return: None.
        """
        self.spec_elems = []

@ -0,0 +1,167 @@
import gc
import os
import copy
from typing import Tuple
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import DroneScanner.utils.utils as utils
from matplotlib.ticker import AutoMinorLocator
class Waterfall:
    """Renders a "waterfall" scatter plot of signal characteristics over time.

    Attributes:
        _freqs: Frequencies along the X axis.
        _delay: Number of reads between plot refreshes.
        _size: Waterfall height (rows) along the Y axis.
        _counter_til_init: Counts down from size-1 to -1 while colors and
            values_to_plot are being seeded.
        _skip_update_counter: Read counter compared against _delay.
        _points_scale: Marker size, shrinking as the frequency count grows.
        debug_flag: Debug flag for this class.
        _save_plots: When True, figures are written to disk instead of shown.
        _cur_plot_file_idx: Rolling index of the saved figure file.
        _max_plot_files: Index at which the rolling file index wraps to 0.
        _dir_to_save: Directory receiving saved figures.
        colors: One row of point colours per waterfall row.
        values_to_plot: Y values for every waterfall row.
    """

    def __init__(self, freqs: list, delay: int, size: int, debug_flag: bool, save_plots=False, max_plot_files=1000,
                 dir_to_save='/home/orangepi/plots'):
        """Set up an empty waterfall.

        :param freqs: Frequencies along the X axis.
        :param delay: Number of reads between plot refreshes.
        :param size: Waterfall height in rows.
        :param debug_flag: Debug flag for this class.
        :param save_plots: Save figures to disk instead of showing them.
        :param max_plot_files: Wrap point for the saved-file index.
        :param dir_to_save: Target directory for saved figures.
        """
        self._freqs = freqs
        self._delay = delay
        self._size = size
        self._counter_til_init = size - 1
        self._skip_update_counter = 1
        self._points_scale = 500 / len(self._freqs)
        self.debug_flag = debug_flag
        self._save_plots = save_plots
        self._cur_plot_file_idx = 0
        self._max_plot_files = max_plot_files
        self._dir_to_save = dir_to_save
        self.colors = []
        self.values_to_plot = []
        if not os.path.exists(self._dir_to_save):
            os.makedirs(self._dir_to_save)

    def interpolate_color(self, start_color: tuple, end_color: tuple, factor: float) -> tuple:
        """Blend two RGB colours linearly and normalise the result to 0..1.

        factor == 0 gives start_color / 255; factor == 1 gives end_color / 255.

        :param start_color: (R, G, B) with components in 0..255.
        :param end_color: (R, G, B) with components in 0..255.
        :param factor: Blend weight in [0, 1].
        :return: (r, g, b) with components in [0, 1].
        """
        return tuple((lo + (hi - lo) * factor) / 255 for lo, hi in zip(start_color, end_color))

    def decorate(self, data: list) -> list:
        """Map one row of characteristics to colours on a green->red scale.

        :param data: Row of len(freqs) signal characteristics in [0, 1].
        :return: Row of len(freqs) RGB triples in [0, 1].
        """
        green = (0, 255, 0)
        red = (255, 0, 0)
        return [self.interpolate_color(green, red, value) for value in data]

    def transform_value_structure(self) -> Tuple[list, list, list]:
        """Flatten freqs / values_to_plot / colors into parallel point lists.

        :return: X coordinates, Y coordinates and per-point colours.
        """
        xs = []
        ys = []
        cs = []
        for col in range(utils.get_num_columns_of_array(self.values_to_plot)):
            # Walk rows bottom-up so newer rows land on top of the plot.
            for row in range(utils.get_num_rows_of_array(self.values_to_plot) - 1, -1, -1):
                xs.append(self._freqs[col])
                ys.append(self.values_to_plot[row][col])
                cs.append(self.colors[row][col])
        return xs, ys, cs

    def plot(self) -> None:
        """Draw the waterfall via plt.scatter and save or show the figure.

        :return: None.
        """
        xs, ys, point_colors = self.transform_value_structure()
        plt.scatter(xs, ys, c=point_colors, s=self._points_scale, marker='s')
        # Colour-bar legend for the 0..1 characteristic range.
        plt.colorbar(cm.ScalarMappable(norm=mcolors.Normalize(vmin=0, vmax=1), cmap='RdYlGn_r'), label='Значение')
        # Pad the Y limits by 10% of the waterfall height.
        plt.ylim(-self._size / 10, (self._size - 1) + (self._size / 10))
        plt.xlabel('Частоты')
        plt.ylabel('Итерации')
        plt.title('Waterfall')
        # Auto-scaled major/minor grid.
        plt.grid(which='major', color='gray', linestyle='-', linewidth=0.5)
        plt.minorticks_on()
        plt.gca().xaxis.set_minor_locator(AutoMinorLocator())
        plt.gca().yaxis.set_minor_locator(AutoMinorLocator())
        plt.grid(which='minor', color='gray', linestyle=':', linewidth=0.5)
        if self._save_plots:
            plt.savefig(f'{self._dir_to_save}/waterfall_{self._cur_plot_file_idx}.png')
            # Roll the file index over once it reaches _max_plot_files.
            if self._cur_plot_file_idx == self._max_plot_files:
                self._cur_plot_file_idx = 0
            else:
                self._cur_plot_file_idx += 1
        else:
            plt.show()
            plt.pause(0.001)
        plt.close()
        gc.collect()

    def update(self, data: list) -> None:
        """Feed one data row into the waterfall; redraw every _delay calls.

        :param data: Row of len(freqs) signal characteristics.
        :return: None.
        """
        if self._skip_update_counter != self._delay:
            self._skip_update_counter += 1
            return
        colored_row = self.decorate(copy.deepcopy(data))
        if self._counter_til_init != -1:
            # Seed phase: add one Y row per update until `size` rows exist.
            self.values_to_plot.append([self._counter_til_init] * len(self._freqs))
            self._counter_til_init -= 1
        else:
            # Steady state: drop the oldest colour row before inserting anew.
            self.colors.pop()
        self.colors.insert(0, colored_row)
        self.plot()
        self._skip_update_counter = 1

@ -0,0 +1,120 @@
import os
import datetime
from common.runtime import load_root_env, validate_env, as_bool, as_str
from smb.SMBConnection import SMBConnection
from utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from core.multichannelswitcher import MultiChannel, get_centre_freq
load_root_env(__file__)

# The band suffix (e.g. "3300") comes from this module's file name, so the
# same setup code serves every band-specific module.
freq_suffix = os.path.splitext(os.path.basename(__file__))[0].split("_")[-1]
validate_env(__file__, {
    "send_to_module_flag": as_bool,
    "save_data_flag": as_bool,
    "elems_to_save": as_str,
    "file_types_to_save": as_str,
    "lochost": as_str,
    "locport": as_str,
    "freq_endpoint": as_str,
    "path_to_save_medians": as_str,
    "path_to_save_alarms": as_str,
    "module_name": as_str,
    f"f_step_{freq_suffix}": as_str,
    f"f_bases_{freq_suffix}": as_str,
    f"f_roofs_{freq_suffix}": as_str,
})

# Runtime flags and reporting destinations.
debug_flag = as_bool(os.getenv('debug_flag', '0'))
send_to_module_flag = as_bool(os.getenv('send_to_module_flag', '0'))
save_data_flag = as_bool(os.getenv('save_data_flag', '0'))
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')
# Band limits: use the file-derived suffix instead of a hard-coded literal so
# the keys always match what validate_env() checked above.
f_step = [*map(float, os.getenv(f'f_step_{freq_suffix}').split())]
f_bases = [*map(float, os.getenv(f'f_bases_{freq_suffix}').split())]
f_roofs = [*map(float, os.getenv(f'f_roofs_{freq_suffix}').split())]
path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')
# SMB parameters for saving alarm signals remotely (smb_port / smb_domain are
# read but currently unused in this module).
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
freq_endpoint = os.getenv('freq_endpoint')
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')

# Shared processing state for this band.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()

if debug_flag:
    # SMB connection is only opened in debug mode; work() also only uses
    # `conn` under debug_flag, so this is consistent.
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)  # NOTE(review): port 139 hard-coded; smb_port env ignored
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """Handle one block of samples for the currently tuned channel.

    Accumulates samples until a full signal is collected, then checks the
    band's ring buffer for an alarm, optionally reports/saves the result and
    finally hops to the next channel.

    :param lvl: Raw sample chunk read from the receiver.
    :return: Frequency the receiver should be tuned to for the next read.
    """
    f = multi_channel.get_cur_channel()
    centre = get_centre_freq(f)
    target_len = get_signal_length(centre)
    metric = tmp_signal.fill_signal(lvl, target_len)
    if not metric:
        # Still collecting samples for this channel - stay tuned.
        return f
    try:
        n_chs, buf = multi_channel.check_f(f)
        ch_idx, window = tmp_sigs_array.fill_sig_arr(metric, n_chs)
        if window:
            print('Значения на {0}: {1}'.format(centre, window))
            print('Пороги: ', buf.get_medians())
            alarm = buf.check_alarm(window)
            if alarm:
                print('----ALARM---- ', centre)
                multi_channel.db_alarms_zeros(buf)
            else:
                buf.update(window)
            if send_to_module_flag:
                send_data(agregator(centre, alarm), localhost, localport, freq_endpoint)
            if save_data_flag:
                # presumably writes a header row once the buffer restarts - confirm
                if not buf.check_init() and buf.current_column - 1 == 0:
                    save_data(path_to_save_medians, centre, 'DateTime', 'ALARM', 'max signal',
                              list(range(n_chs)), list(range(n_chs)))
                if buf.check_init():
                    save_data(path_to_save_medians, centre, datetime.datetime.now(), alarm,
                              max(window), window, buf.get_medians())
        if debug_flag:
            single_alarm = buf.check_single_alarm(metric, ch_idx)
            print(ch_idx, single_alarm)
            if single_alarm:
                payload = pack_elems(elems_to_save, file_types_to_save, tmp_signal.get_signal())
                print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                try:
                    remote_save_data(conn, payload, module_name, centre, shared_folder, path_to_save_alarms)
                except Exception as e:
                    print(f"Ошибка: {e}")
            else:
                print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
        f = multi_channel.change_channel()
    except Exception as e:
        print(str(e))
    print(".", end='')
    tmp_signal.clear()
    return f

@ -0,0 +1,120 @@
import os
import datetime
from common.runtime import load_root_env, validate_env, as_bool, as_str
from smb.SMBConnection import SMBConnection
from utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from core.multichannelswitcher import MultiChannel, get_centre_freq
load_root_env(__file__)

# The band suffix (e.g. "433") comes from this module's file name, so the
# same setup code serves every band-specific module.
freq_suffix = os.path.splitext(os.path.basename(__file__))[0].split("_")[-1]
validate_env(__file__, {
    "send_to_module_flag": as_bool,
    "save_data_flag": as_bool,
    "elems_to_save": as_str,
    "file_types_to_save": as_str,
    "lochost": as_str,
    "locport": as_str,
    "freq_endpoint": as_str,
    "path_to_save_medians": as_str,
    "path_to_save_alarms": as_str,
    "module_name": as_str,
    f"f_step_{freq_suffix}": as_str,
    f"f_bases_{freq_suffix}": as_str,
    f"f_roofs_{freq_suffix}": as_str,
})

# Runtime flags and reporting destinations.
debug_flag = as_bool(os.getenv('debug_flag', '0'))
send_to_module_flag = as_bool(os.getenv('send_to_module_flag', '0'))
save_data_flag = as_bool(os.getenv('save_data_flag', '0'))
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')
# Band limits: use the file-derived suffix instead of a hard-coded literal so
# the keys always match what validate_env() checked above.
f_step = [*map(float, os.getenv(f'f_step_{freq_suffix}').split())]
f_bases = [*map(float, os.getenv(f'f_bases_{freq_suffix}').split())]
f_roofs = [*map(float, os.getenv(f'f_roofs_{freq_suffix}').split())]
path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')
# SMB parameters for saving alarm signals remotely (smb_port / smb_domain are
# read but currently unused in this module).
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
freq_endpoint = os.getenv('freq_endpoint')
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')

# Shared processing state for this band.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()

if debug_flag:
    # SMB connection is only opened in debug mode; work() also only uses
    # `conn` under debug_flag, so this is consistent.
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)  # NOTE(review): port 139 hard-coded; smb_port env ignored
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """Handle one block of samples for the currently tuned channel.

    Accumulates samples until a full signal is collected, then checks the
    band's ring buffer for an alarm, optionally reports/saves the result and
    finally hops to the next channel.

    :param lvl: Raw sample chunk read from the receiver.
    :return: Frequency the receiver should be tuned to for the next read.
    """
    f = multi_channel.get_cur_channel()
    centre = get_centre_freq(f)
    target_len = get_signal_length(centre)
    metric = tmp_signal.fill_signal(lvl, target_len)
    if not metric:
        # Still collecting samples for this channel - stay tuned.
        return f
    try:
        n_chs, buf = multi_channel.check_f(f)
        ch_idx, window = tmp_sigs_array.fill_sig_arr(metric, n_chs)
        if window:
            print('Значения на {0}: {1}'.format(centre, window))
            print('Пороги: ', buf.get_medians())
            alarm = buf.check_alarm(window)
            if alarm:
                print('----ALARM---- ', centre)
                multi_channel.db_alarms_zeros(buf)
            else:
                buf.update(window)
            if send_to_module_flag:
                send_data(agregator(centre, alarm), localhost, localport, freq_endpoint)
            if save_data_flag:
                # presumably writes a header row once the buffer restarts - confirm
                if not buf.check_init() and buf.current_column - 1 == 0:
                    save_data(path_to_save_medians, centre, 'DateTime', 'ALARM', 'max signal',
                              list(range(n_chs)), list(range(n_chs)))
                if buf.check_init():
                    save_data(path_to_save_medians, centre, datetime.datetime.now(), alarm,
                              max(window), window, buf.get_medians())
        if debug_flag:
            single_alarm = buf.check_single_alarm(metric, ch_idx)
            print(ch_idx, single_alarm)
            if single_alarm:
                payload = pack_elems(elems_to_save, file_types_to_save, tmp_signal.get_signal())
                print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                try:
                    remote_save_data(conn, payload, module_name, centre, shared_folder, path_to_save_alarms)
                except Exception as e:
                    print(f"Ошибка: {e}")
            else:
                print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
        f = multi_channel.change_channel()
    except Exception as e:
        print(str(e))
    print(".", end='')
    tmp_signal.clear()
    return f

@ -0,0 +1,120 @@
import os
import datetime
from common.runtime import load_root_env, validate_env, as_bool, as_str
from smb.SMBConnection import SMBConnection
from utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from core.multichannelswitcher import MultiChannel, get_centre_freq
load_root_env(__file__)

# The band suffix (e.g. "4500") comes from this module's file name, so the
# same setup code serves every band-specific module.
freq_suffix = os.path.splitext(os.path.basename(__file__))[0].split("_")[-1]
validate_env(__file__, {
    "send_to_module_flag": as_bool,
    "save_data_flag": as_bool,
    "elems_to_save": as_str,
    "file_types_to_save": as_str,
    "lochost": as_str,
    "locport": as_str,
    "freq_endpoint": as_str,
    "path_to_save_medians": as_str,
    "path_to_save_alarms": as_str,
    "module_name": as_str,
    f"f_step_{freq_suffix}": as_str,
    f"f_bases_{freq_suffix}": as_str,
    f"f_roofs_{freq_suffix}": as_str,
})

# Runtime flags and reporting destinations.
debug_flag = as_bool(os.getenv('debug_flag', '0'))
send_to_module_flag = as_bool(os.getenv('send_to_module_flag', '0'))
save_data_flag = as_bool(os.getenv('save_data_flag', '0'))
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')
# Band limits: use the file-derived suffix instead of a hard-coded literal so
# the keys always match what validate_env() checked above.
f_step = [*map(float, os.getenv(f'f_step_{freq_suffix}').split())]
f_bases = [*map(float, os.getenv(f'f_bases_{freq_suffix}').split())]
f_roofs = [*map(float, os.getenv(f'f_roofs_{freq_suffix}').split())]
path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')
# SMB parameters for saving alarm signals remotely (smb_port / smb_domain are
# read but currently unused in this module).
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
freq_endpoint = os.getenv('freq_endpoint')
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')

# Shared processing state for this band.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()

if debug_flag:
    # SMB connection is only opened in debug mode; work() also only uses
    # `conn` under debug_flag, so this is consistent.
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)  # NOTE(review): port 139 hard-coded; smb_port env ignored
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """Handle one block of samples for the currently tuned channel.

    Accumulates samples until a full signal is collected, then checks the
    band's ring buffer for an alarm, optionally reports/saves the result and
    finally hops to the next channel.

    :param lvl: Raw sample chunk read from the receiver.
    :return: Frequency the receiver should be tuned to for the next read.
    """
    f = multi_channel.get_cur_channel()
    centre = get_centre_freq(f)
    target_len = get_signal_length(centre)
    metric = tmp_signal.fill_signal(lvl, target_len)
    if not metric:
        # Still collecting samples for this channel - stay tuned.
        return f
    try:
        n_chs, buf = multi_channel.check_f(f)
        ch_idx, window = tmp_sigs_array.fill_sig_arr(metric, n_chs)
        if window:
            print('Значения на {0}: {1}'.format(centre, window))
            print('Пороги: ', buf.get_medians())
            alarm = buf.check_alarm(window)
            if alarm:
                print('----ALARM---- ', centre)
                multi_channel.db_alarms_zeros(buf)
            else:
                buf.update(window)
            if send_to_module_flag:
                send_data(agregator(centre, alarm), localhost, localport, freq_endpoint)
            if save_data_flag:
                # presumably writes a header row once the buffer restarts - confirm
                if not buf.check_init() and buf.current_column - 1 == 0:
                    save_data(path_to_save_medians, centre, 'DateTime', 'ALARM', 'max signal',
                              list(range(n_chs)), list(range(n_chs)))
                if buf.check_init():
                    save_data(path_to_save_medians, centre, datetime.datetime.now(), alarm,
                              max(window), window, buf.get_medians())
        if debug_flag:
            single_alarm = buf.check_single_alarm(metric, ch_idx)
            print(ch_idx, single_alarm)
            if single_alarm:
                payload = pack_elems(elems_to_save, file_types_to_save, tmp_signal.get_signal())
                print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                try:
                    remote_save_data(conn, payload, module_name, centre, shared_folder, path_to_save_alarms)
                except Exception as e:
                    print(f"Ошибка: {e}")
            else:
                print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
        f = multi_channel.change_channel()
    except Exception as e:
        print(str(e))
    print(".", end='')
    tmp_signal.clear()
    return f

@ -0,0 +1,120 @@
import os
import datetime
from common.runtime import load_root_env, validate_env, as_bool, as_str
from smb.SMBConnection import SMBConnection
from utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from core.multichannelswitcher import MultiChannel, get_centre_freq
load_root_env(__file__)

# The band suffix (e.g. "5200") comes from this module's file name, so the
# same setup code serves every band-specific module.
freq_suffix = os.path.splitext(os.path.basename(__file__))[0].split("_")[-1]
validate_env(__file__, {
    "send_to_module_flag": as_bool,
    "save_data_flag": as_bool,
    "elems_to_save": as_str,
    "file_types_to_save": as_str,
    "lochost": as_str,
    "locport": as_str,
    "freq_endpoint": as_str,
    "path_to_save_medians": as_str,
    "path_to_save_alarms": as_str,
    "module_name": as_str,
    f"f_step_{freq_suffix}": as_str,
    f"f_bases_{freq_suffix}": as_str,
    f"f_roofs_{freq_suffix}": as_str,
})

# Runtime flags and reporting destinations.
debug_flag = as_bool(os.getenv('debug_flag', '0'))
send_to_module_flag = as_bool(os.getenv('send_to_module_flag', '0'))
save_data_flag = as_bool(os.getenv('save_data_flag', '0'))
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')
# Band limits: use the file-derived suffix instead of a hard-coded literal so
# the keys always match what validate_env() checked above.
f_step = [*map(float, os.getenv(f'f_step_{freq_suffix}').split())]
f_bases = [*map(float, os.getenv(f'f_bases_{freq_suffix}').split())]
f_roofs = [*map(float, os.getenv(f'f_roofs_{freq_suffix}').split())]
path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')
# SMB parameters for saving alarm signals remotely (smb_port / smb_domain are
# read but currently unused in this module).
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
freq_endpoint = os.getenv('freq_endpoint')
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')

# Shared processing state for this band.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()

if debug_flag:
    # SMB connection is only opened in debug mode; work() also only uses
    # `conn` under debug_flag, so this is consistent.
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)  # NOTE(review): port 139 hard-coded; smb_port env ignored
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """Handle one block of samples for the currently tuned channel.

    Accumulates samples until a full signal is collected, then checks the
    band's ring buffer for an alarm, optionally reports/saves the result and
    finally hops to the next channel.

    :param lvl: Raw sample chunk read from the receiver.
    :return: Frequency the receiver should be tuned to for the next read.
    """
    f = multi_channel.get_cur_channel()
    centre = get_centre_freq(f)
    target_len = get_signal_length(centre)
    metric = tmp_signal.fill_signal(lvl, target_len)
    if not metric:
        # Still collecting samples for this channel - stay tuned.
        return f
    try:
        n_chs, buf = multi_channel.check_f(f)
        ch_idx, window = tmp_sigs_array.fill_sig_arr(metric, n_chs)
        if window:
            print('Значения на {0}: {1}'.format(centre, window))
            print('Пороги: ', buf.get_medians())
            alarm = buf.check_alarm(window)
            if alarm:
                print('----ALARM---- ', centre)
                multi_channel.db_alarms_zeros(buf)
            else:
                buf.update(window)
            if send_to_module_flag:
                send_data(agregator(centre, alarm), localhost, localport, freq_endpoint)
            if save_data_flag:
                # presumably writes a header row once the buffer restarts - confirm
                if not buf.check_init() and buf.current_column - 1 == 0:
                    save_data(path_to_save_medians, centre, 'DateTime', 'ALARM', 'max signal',
                              list(range(n_chs)), list(range(n_chs)))
                if buf.check_init():
                    save_data(path_to_save_medians, centre, datetime.datetime.now(), alarm,
                              max(window), window, buf.get_medians())
        if debug_flag:
            single_alarm = buf.check_single_alarm(metric, ch_idx)
            print(ch_idx, single_alarm)
            if single_alarm:
                payload = pack_elems(elems_to_save, file_types_to_save, tmp_signal.get_signal())
                print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                try:
                    remote_save_data(conn, payload, module_name, centre, shared_folder, path_to_save_alarms)
                except Exception as e:
                    print(f"Ошибка: {e}")
            else:
                print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
        f = multi_channel.change_channel()
    except Exception as e:
        print(str(e))
    print(".", end='')
    tmp_signal.clear()
    return f

@ -0,0 +1,120 @@
import os
import datetime
from common.runtime import load_root_env, validate_env, as_bool, as_str
from smb.SMBConnection import SMBConnection
from utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from core.multichannelswitcher import MultiChannel, get_centre_freq
load_root_env(__file__)

# The band suffix (e.g. "5800") comes from this module's file name, so the
# same setup code serves every band-specific module.
freq_suffix = os.path.splitext(os.path.basename(__file__))[0].split("_")[-1]
validate_env(__file__, {
    "send_to_module_flag": as_bool,
    "save_data_flag": as_bool,
    "elems_to_save": as_str,
    "file_types_to_save": as_str,
    "lochost": as_str,
    "locport": as_str,
    "freq_endpoint": as_str,
    "path_to_save_medians": as_str,
    "path_to_save_alarms": as_str,
    "module_name": as_str,
    f"f_step_{freq_suffix}": as_str,
    f"f_bases_{freq_suffix}": as_str,
    f"f_roofs_{freq_suffix}": as_str,
})

# Runtime flags and reporting destinations.
debug_flag = as_bool(os.getenv('debug_flag', '0'))
send_to_module_flag = as_bool(os.getenv('send_to_module_flag', '0'))
save_data_flag = as_bool(os.getenv('save_data_flag', '0'))
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')
# Band limits: use the file-derived suffix instead of a hard-coded literal so
# the keys always match what validate_env() checked above.
f_step = [*map(float, os.getenv(f'f_step_{freq_suffix}').split())]
f_bases = [*map(float, os.getenv(f'f_bases_{freq_suffix}').split())]
f_roofs = [*map(float, os.getenv(f'f_roofs_{freq_suffix}').split())]
path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')
# SMB parameters for saving alarm signals remotely (smb_port / smb_domain are
# read but currently unused in this module).
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
freq_endpoint = os.getenv('freq_endpoint')
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')

# Shared processing state for this band.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()

if debug_flag:
    # SMB connection is only opened in debug mode; work() also only uses
    # `conn` under debug_flag, so this is consistent.
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)  # NOTE(review): port 139 hard-coded; smb_port env ignored
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """Handle one block of samples for the currently tuned channel.

    Accumulates samples until a full signal is collected, then checks the
    band's ring buffer for an alarm, optionally reports/saves the result and
    finally hops to the next channel.

    :param lvl: Raw sample chunk read from the receiver.
    :return: Frequency the receiver should be tuned to for the next read.
    """
    f = multi_channel.get_cur_channel()
    centre = get_centre_freq(f)
    target_len = get_signal_length(centre)
    metric = tmp_signal.fill_signal(lvl, target_len)
    if not metric:
        # Still collecting samples for this channel - stay tuned.
        return f
    try:
        n_chs, buf = multi_channel.check_f(f)
        ch_idx, window = tmp_sigs_array.fill_sig_arr(metric, n_chs)
        if window:
            print('Значения на {0}: {1}'.format(centre, window))
            print('Пороги: ', buf.get_medians())
            alarm = buf.check_alarm(window)
            if alarm:
                print('----ALARM---- ', centre)
                multi_channel.db_alarms_zeros(buf)
            else:
                buf.update(window)
            if send_to_module_flag:
                send_data(agregator(centre, alarm), localhost, localport, freq_endpoint)
            if save_data_flag:
                # presumably writes a header row once the buffer restarts - confirm
                if not buf.check_init() and buf.current_column - 1 == 0:
                    save_data(path_to_save_medians, centre, 'DateTime', 'ALARM', 'max signal',
                              list(range(n_chs)), list(range(n_chs)))
                if buf.check_init():
                    save_data(path_to_save_medians, centre, datetime.datetime.now(), alarm,
                              max(window), window, buf.get_medians())
        if debug_flag:
            single_alarm = buf.check_single_alarm(metric, ch_idx)
            print(ch_idx, single_alarm)
            if single_alarm:
                payload = pack_elems(elems_to_save, file_types_to_save, tmp_signal.get_signal())
                print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                try:
                    remote_save_data(conn, payload, module_name, centre, shared_folder, path_to_save_alarms)
                except Exception as e:
                    print(f"Ошибка: {e}")
            else:
                print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
        f = multi_channel.change_channel()
    except Exception as e:
        print(str(e))
    print(".", end='')
    tmp_signal.clear()
    return f

@ -0,0 +1,123 @@
import os
import datetime
from common.runtime import load_root_env, validate_env, as_bool, as_str
from smb.SMBConnection import SMBConnection
from utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from core.multichannelswitcher import MultiChannel, get_centre_freq
load_root_env(__file__)

# The band suffix (e.g. "750") comes from this module's file name, so the
# same setup code serves every band-specific module.
freq_suffix = os.path.splitext(os.path.basename(__file__))[0].split("_")[-1]
validate_env(__file__, {
    "send_to_module_flag": as_bool,
    "save_data_flag": as_bool,
    "elems_to_save": as_str,
    "file_types_to_save": as_str,
    "lochost": as_str,
    "locport": as_str,
    "freq_endpoint": as_str,
    "path_to_save_medians": as_str,
    "path_to_save_alarms": as_str,
    "module_name": as_str,
    f"f_step_{freq_suffix}": as_str,
    f"f_bases_{freq_suffix}": as_str,
    f"f_roofs_{freq_suffix}": as_str,
})

# Runtime flags and reporting destinations.
debug_flag = as_bool(os.getenv('debug_flag', '0'))
send_to_module_flag = as_bool(os.getenv('send_to_module_flag', '0'))
save_data_flag = as_bool(os.getenv('save_data_flag', '0'))
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')
# Band limits: use the file-derived suffix instead of a hard-coded literal so
# the keys always match what validate_env() checked above.
f_step = [*map(float, os.getenv(f'f_step_{freq_suffix}').split())]
f_bases = [*map(float, os.getenv(f'f_bases_{freq_suffix}').split())]
f_roofs = [*map(float, os.getenv(f'f_roofs_{freq_suffix}').split())]
path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')
# SMB parameters for saving alarm signals remotely (smb_port / smb_domain are
# read but currently unused in this module).
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
freq_endpoint = os.getenv('freq_endpoint')
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')

# Shared processing state for this band.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()

if debug_flag:
    # SMB connection is only opened in debug mode; work() also only uses
    # `conn` under debug_flag, so this is consistent.
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)  # NOTE(review): port 139 hard-coded; smb_port env ignored
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """Handle one block of samples for the currently tuned channel.

    Accumulates samples until a full signal is collected, then checks the
    band's ring buffer for an alarm, optionally reports/saves the result and
    finally hops to the next channel.

    Fix: removed the leftover debug prints (`print(1)`,
    `print(num_chs, circle_buffer)`, `print(3)`) that the five sibling band
    modules do not emit.

    :param lvl: Raw sample chunk read from the receiver.
    :return: Frequency the receiver should be tuned to for the next read.
    """
    f = multi_channel.get_cur_channel()
    freq = get_centre_freq(f)
    signal_length = get_signal_length(freq)
    median = tmp_signal.fill_signal(lvl, signal_length)
    if median:
        try:
            num_chs, circle_buffer = multi_channel.check_f(f)
            cur_channel, sigs_array = tmp_sigs_array.fill_sig_arr(median, num_chs)
            if sigs_array:
                print('Значения на {0}: {1}'.format(freq, sigs_array))
                print('Пороги: ', circle_buffer.get_medians())
                alarm = circle_buffer.check_alarm(sigs_array)
                if alarm:
                    print('----ALARM---- ', freq)
                    multi_channel.db_alarms_zeros(circle_buffer)
                else:
                    circle_buffer.update(sigs_array)
                if send_to_module_flag:
                    send_data(agregator(freq, alarm), localhost, localport, freq_endpoint)
                if save_data_flag:
                    # presumably writes a header row once the buffer restarts - confirm
                    if not circle_buffer.check_init() and circle_buffer.current_column - 1 == 0:
                        save_data(path_to_save_medians, freq, 'DateTime', 'ALARM', 'max signal', list(range(num_chs)),
                                  list(range(num_chs)))
                    if circle_buffer.check_init():
                        save_data(path_to_save_medians, freq, datetime.datetime.now(), alarm, max(sigs_array), sigs_array,
                                  circle_buffer.get_medians())
            if debug_flag:
                single_alarm = circle_buffer.check_single_alarm(median, cur_channel)
                print(cur_channel, single_alarm)
                if single_alarm:
                    data = pack_elems(elems_to_save, file_types_to_save, tmp_signal.get_signal())
                    print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                    try:
                        remote_save_data(conn, data, module_name, freq, shared_folder, path_to_save_alarms)
                    except Exception as e:
                        print(f"Ошибка: {e}")
                else:
                    print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
            f = multi_channel.change_channel()
        except Exception as e:
            print(str(e))
        print(".", end='')
        tmp_signal.clear()
    return f

@ -0,0 +1,120 @@
import os
import datetime
from common.runtime import load_root_env, validate_env, as_bool, as_str
from smb.SMBConnection import SMBConnection
from utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from core.multichannelswitcher import MultiChannel, get_centre_freq
# Load the repository-root .env so os.getenv sees deployment configuration.
load_root_env(__file__)

# Band suffix derived from this file's name (e.g. "main_868.py" -> "868");
# it selects the band-specific f_step/f_bases/f_roofs environment keys.
freq_suffix = os.path.splitext(os.path.basename(__file__))[0].split("_")[-1]
validate_env(__file__, {
    "send_to_module_flag": as_bool,
    "save_data_flag": as_bool,
    "elems_to_save": as_str,
    "file_types_to_save": as_str,
    "lochost": as_str,
    "locport": as_str,
    "freq_endpoint": as_str,
    "path_to_save_medians": as_str,
    "path_to_save_alarms": as_str,
    "module_name": as_str,
    f"f_step_{freq_suffix}": as_str,
    f"f_bases_{freq_suffix}": as_str,
    f"f_roofs_{freq_suffix}": as_str,
})

# Runtime flags (default off).
debug_flag = as_bool(os.getenv('debug_flag', '0'))
send_to_module_flag = as_bool(os.getenv('send_to_module_flag', '0'))
save_data_flag = as_bool(os.getenv('save_data_flag', '0'))
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')

# FIX: read the band-specific keys via freq_suffix instead of the hard-coded
# '868' suffix, matching the keys checked by validate_env above.
f_step = [*map(float, os.getenv(f'f_step_{freq_suffix}').split())]
f_bases = [*map(float, os.getenv(f'f_bases_{freq_suffix}').split())]
f_roofs = [*map(float, os.getenv(f'f_roofs_{freq_suffix}').split())]

path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')

# SMB upload settings (used only when debug_flag is set).
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
freq_endpoint = os.getenv('freq_endpoint')

# Comma-separated env strings -> lists.
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')

# Shared runtime objects: signal accumulator, per-channel arrays, channel switcher.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()
if debug_flag:
    # Debug mode: open the SMB share used to upload captured alarm signals.
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """Process one level sample for the current channel.

    Accumulates ``lvl`` into the current signal; when a full signal is ready
    (``median`` truthy) runs alarm detection against the rolling thresholds,
    optionally forwards/saves results, and hops to the next channel.
    Returns the (possibly updated) channel descriptor ``f``.
    """
    f = multi_channel.get_cur_channel()
    freq = get_centre_freq(f)
    signal_length = get_signal_length(freq)
    median = tmp_signal.fill_signal(lvl, signal_length)
    if median:
        try:
            num_chs, circle_buffer = multi_channel.check_f(f)
            cur_channel, sigs_array = tmp_sigs_array.fill_sig_arr(median, num_chs)
            if sigs_array:
                print('Значения на {0}: {1}'.format(freq, sigs_array))
                print('Пороги: ', circle_buffer.get_medians())
                alarm = circle_buffer.check_alarm(sigs_array)
                if alarm:
                    print('----ALARM---- ', freq)
                    multi_channel.db_alarms_zeros(circle_buffer)
                else:
                    # No alarm: fold the new values into the rolling medians.
                    circle_buffer.update(sigs_array)
                if send_to_module_flag:
                    send_data(agregator(freq, alarm), localhost, localport, freq_endpoint)
                if save_data_flag:
                    # Header row written once on the very first column of a
                    # not-yet-initialised buffer; data rows once initialised.
                    if not circle_buffer.check_init() and circle_buffer.current_column - 1 == 0:
                        save_data(path_to_save_medians, freq, 'DateTime', 'ALARM', 'max signal', list(range(num_chs)),
                                  list(range(num_chs)))
                    if circle_buffer.check_init():
                        save_data(path_to_save_medians, freq, datetime.datetime.now(), alarm, max(sigs_array), sigs_array,
                                  circle_buffer.get_medians())
                if debug_flag:
                    single_alarm = circle_buffer.check_single_alarm(median, cur_channel)
                    print(cur_channel, single_alarm)
                    if single_alarm:
                        data = pack_elems(elems_to_save, file_types_to_save, tmp_signal.get_signal())
                        print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                        try:
                            remote_save_data(conn, data, module_name, freq, shared_folder, path_to_save_alarms)
                        except Exception as e:
                            print(f"Ошибка: {e}")
                    else:
                        print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
            # NOTE(review): channel hop assumed inside the try so a failure
            # keeps the current channel — confirm original nesting.
            f = multi_channel.change_channel()
        except Exception as e:
            # Broad catch keeps the acquisition loop alive on per-channel errors.
            print(str(e))
        print(".", end='')
        tmp_signal.clear()
    return f

@ -0,0 +1,208 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: get_center_freq
# GNU Radio version: 3.8.1.0
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
import embedded_3300 as my_freq # embedded python module
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
# Load the repository-root .env so the hack_* serial variables are available.
load_root_env(__file__)


def get_hack_id():
    """Return the HackRF device index (as a string) for this band's configured serial.

    Resolution is delegated to common.runtime.resolve_hackrf_index.
    FIX: the legacy inline lsusb-parsing code that followed this return was
    unreachable dead code (and, via the shadowed builtin ``id``, would have
    returned ``str(<built-in id>)`` on a missing serial); it has been removed.
    """
    return resolve_hackrf_index('hack_3300', 'src/main_3300.py')
class get_center_freq(gr.top_block):
    # GRC-generated flowgraph (GNU Radio 3.8):
    # HackRF source -> stream_to_vector(4096) -> probe_signal_vc.
    # A daemon thread polls the probe and feeds each vector to my_freq.work(),
    # whose result retunes the SDR centre frequency.

    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")

        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000
        self.num_points = num_points = 8192
        self.flag = flag = 1
        self.decimation = decimation = 1
        # Initial tune computed by the embedded frequency module.
        self.center_freq = center_freq = my_freq.work(prob_freq)

        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # Device chosen by index resolved from the configured serial number.
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        # Band-specific RF/IF/BB gain settings as generated.
        self.rtlsdr_source_0.set_gain(100, 0)
        self.rtlsdr_source_0.set_if_gain(100, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)

        def _prob_freq_probe():
            # Poll the probe every 1/poll_rate seconds and push the level
            # through set_prob_freq (which retunes via my_freq.work).
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))

    # --- GRC-generated accessors -------------------------------------------

    def get_prob_freq(self):
        return self.prob_freq

    def set_prob_freq(self, prob_freq):
        # Store the probed vector and retune the SDR via the embedded module.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq))

    def get_top_peaks_amount(self):
        return self.top_peaks_amount

    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount

    def get_samp_rate(self):
        return self.samp_rate

    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)

    def get_poll_rate(self):
        return self.poll_rate

    def set_poll_rate(self, poll_rate):
        # NOTE: the probe thread captured poll_rate at construction; changing
        # it here does not affect the running polling interval.
        self.poll_rate = poll_rate

    def get_num_points(self):
        return self.num_points

    def set_num_points(self, num_points):
        self.num_points = num_points

    def get_flag(self):
        return self.flag

    def set_flag(self, flag):
        self.flag = flag

    def get_decimation(self):
        return self.decimation

    def set_decimation(self, decimation):
        self.decimation = decimation

    def get_center_freq(self):
        return self.center_freq

    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Instantiate and start the flowgraph, print service flags, block until done.

    SIGINT/SIGTERM handlers stop the flowgraph and exit cleanly.
    """
    #for k in range(0, 3):
    #    light_diods_on_boot()
    tb = top_block_cls()

    def sig_handler(sig=None, frame=None):
        tb.stop()
        tb.wait()
        sys.exit(0)

    signal.signal(signal.SIGINT, sig_handler)
    signal.signal(signal.SIGTERM, sig_handler)
    tb.start()
    try:
        # Service info comes from the embedded module's env-driven flags.
        print('СЕРВИСНАЯ ИНФОРМАЦИЯ: ')
        print('debug_flag: ', my_freq.debug_flag)
        print('save_data_flag: ', my_freq.save_data_flag)
        print('send_to_module_flag: ', my_freq.send_to_module_flag)
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '1200')))
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '715')))
    except EOFError:
        pass
    #tb.stop()
    tb.wait()


if __name__ == '__main__':
    main()

@ -0,0 +1,208 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: get_center_freq
# GNU Radio version: 3.8.1.0
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
import embedded_433 as my_freq # embedded python module
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
# Load the repository-root .env so the hack_* serial variables are available.
load_root_env(__file__)


def get_hack_id():
    """Return the HackRF device index (as a string) for this band's configured serial.

    Resolution is delegated to common.runtime.resolve_hackrf_index.
    FIX: the legacy inline lsusb-parsing code that followed this return was
    unreachable dead code (and, via the shadowed builtin ``id``, would have
    returned ``str(<built-in id>)`` on a missing serial); it has been removed.
    """
    return resolve_hackrf_index('hack_433', 'src/main_433.py')
class get_center_freq(gr.top_block):
    # GRC-generated flowgraph (GNU Radio 3.8):
    # HackRF source -> stream_to_vector(4096) -> probe_signal_vc.
    # A daemon thread polls the probe and feeds each vector to my_freq.work(),
    # whose result retunes the SDR centre frequency.

    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")

        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000
        self.num_points = num_points = 8192
        self.flag = flag = 1
        self.decimation = decimation = 1
        # Initial tune computed by the embedded frequency module.
        self.center_freq = center_freq = my_freq.work(prob_freq)

        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # Device chosen by index resolved from the configured serial number.
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        # Band-specific RF/IF/BB gain settings as generated (RF gain 10 here).
        self.rtlsdr_source_0.set_gain(10, 0)
        self.rtlsdr_source_0.set_if_gain(100, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)

        def _prob_freq_probe():
            # Poll the probe every 1/poll_rate seconds and push the level
            # through set_prob_freq (which retunes via my_freq.work).
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))

    # --- GRC-generated accessors -------------------------------------------

    def get_prob_freq(self):
        return self.prob_freq

    def set_prob_freq(self, prob_freq):
        # Store the probed vector and retune the SDR via the embedded module.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq))

    def get_top_peaks_amount(self):
        return self.top_peaks_amount

    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount

    def get_samp_rate(self):
        return self.samp_rate

    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)

    def get_poll_rate(self):
        return self.poll_rate

    def set_poll_rate(self, poll_rate):
        # NOTE: the probe thread captured poll_rate at construction; changing
        # it here does not affect the running polling interval.
        self.poll_rate = poll_rate

    def get_num_points(self):
        return self.num_points

    def set_num_points(self, num_points):
        self.num_points = num_points

    def get_flag(self):
        return self.flag

    def set_flag(self, flag):
        self.flag = flag

    def get_decimation(self):
        return self.decimation

    def set_decimation(self, decimation):
        self.decimation = decimation

    def get_center_freq(self):
        return self.center_freq

    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Instantiate and start the flowgraph, print service flags, block until done.

    SIGINT/SIGTERM handlers stop the flowgraph and exit cleanly.
    """
    #for k in range(0, 3):
    #    light_diods_on_boot()
    tb = top_block_cls()

    def sig_handler(sig=None, frame=None):
        tb.stop()
        tb.wait()
        sys.exit(0)

    signal.signal(signal.SIGINT, sig_handler)
    signal.signal(signal.SIGTERM, sig_handler)
    tb.start()
    try:
        # Service info comes from the embedded module's env-driven flags.
        print('СЕРВИСНАЯ ИНФОРМАЦИЯ: ')
        print('debug_flag: ', my_freq.debug_flag)
        print('save_data_flag: ', my_freq.save_data_flag)
        print('send_to_module_flag: ', my_freq.send_to_module_flag)
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '1200')))
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '715')))
    except EOFError:
        pass
    #tb.stop()
    tb.wait()


if __name__ == '__main__':
    main()

@ -0,0 +1,208 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: get_center_freq
# GNU Radio version: 3.8.1.0
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
import embedded_4500 as my_freq # embedded python module
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
# Load the repository-root .env so the hack_* serial variables are available.
load_root_env(__file__)


def get_hack_id():
    """Return the HackRF device index (as a string) for this band's configured serial.

    Resolution is delegated to common.runtime.resolve_hackrf_index.
    FIX: the legacy inline lsusb-parsing code that followed this return was
    unreachable dead code (and, via the shadowed builtin ``id``, would have
    returned ``str(<built-in id>)`` on a missing serial); it has been removed.
    """
    return resolve_hackrf_index('hack_4500', 'src/main_4500.py')
class get_center_freq(gr.top_block):
    # GRC-generated flowgraph (GNU Radio 3.8):
    # HackRF source -> stream_to_vector(4096) -> probe_signal_vc.
    # A daemon thread polls the probe and feeds each vector to my_freq.work(),
    # whose result retunes the SDR centre frequency.

    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")

        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000
        self.num_points = num_points = 8192
        self.flag = flag = 1
        self.decimation = decimation = 1
        # Initial tune computed by the embedded frequency module.
        self.center_freq = center_freq = my_freq.work(prob_freq)

        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # Device chosen by index resolved from the configured serial number.
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        # Band-specific RF/IF/BB gain settings as generated.
        self.rtlsdr_source_0.set_gain(100, 0)
        self.rtlsdr_source_0.set_if_gain(100, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)

        def _prob_freq_probe():
            # Poll the probe every 1/poll_rate seconds and push the level
            # through set_prob_freq (which retunes via my_freq.work).
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))

    # --- GRC-generated accessors -------------------------------------------

    def get_prob_freq(self):
        return self.prob_freq

    def set_prob_freq(self, prob_freq):
        # Store the probed vector and retune the SDR via the embedded module.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq))

    def get_top_peaks_amount(self):
        return self.top_peaks_amount

    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount

    def get_samp_rate(self):
        return self.samp_rate

    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)

    def get_poll_rate(self):
        return self.poll_rate

    def set_poll_rate(self, poll_rate):
        # NOTE: the probe thread captured poll_rate at construction; changing
        # it here does not affect the running polling interval.
        self.poll_rate = poll_rate

    def get_num_points(self):
        return self.num_points

    def set_num_points(self, num_points):
        self.num_points = num_points

    def get_flag(self):
        return self.flag

    def set_flag(self, flag):
        self.flag = flag

    def get_decimation(self):
        return self.decimation

    def set_decimation(self, decimation):
        self.decimation = decimation

    def get_center_freq(self):
        return self.center_freq

    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Instantiate and start the flowgraph, print service flags, block until done.

    SIGINT/SIGTERM handlers stop the flowgraph and exit cleanly.
    """
    #for k in range(0, 3):
    #    light_diods_on_boot()
    tb = top_block_cls()

    def sig_handler(sig=None, frame=None):
        tb.stop()
        tb.wait()
        sys.exit(0)

    signal.signal(signal.SIGINT, sig_handler)
    signal.signal(signal.SIGTERM, sig_handler)
    tb.start()
    try:
        # Service info comes from the embedded module's env-driven flags.
        print('СЕРВИСНАЯ ИНФОРМАЦИЯ: ')
        print('debug_flag: ', my_freq.debug_flag)
        print('save_data_flag: ', my_freq.save_data_flag)
        print('send_to_module_flag: ', my_freq.send_to_module_flag)
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '1200')))
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '715')))
    except EOFError:
        pass
    #tb.stop()
    tb.wait()


if __name__ == '__main__':
    main()

@ -0,0 +1,208 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: get_center_freq
# GNU Radio version: 3.8.1.0
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
import embedded_5200 as my_freq # embedded python module
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
# Load the repository-root .env so the hack_* serial variables are available.
load_root_env(__file__)


def get_hack_id():
    """Return the HackRF device index (as a string) for this band's configured serial.

    Resolution is delegated to common.runtime.resolve_hackrf_index.
    FIX: the legacy inline lsusb-parsing code that followed this return was
    unreachable dead code (and, via the shadowed builtin ``id``, would have
    returned ``str(<built-in id>)`` on a missing serial); it has been removed.
    """
    return resolve_hackrf_index('hack_5200', 'src/main_5200.py')
class get_center_freq(gr.top_block):
    # GRC-generated flowgraph (GNU Radio 3.8):
    # HackRF source -> stream_to_vector(4096) -> probe_signal_vc.
    # A daemon thread polls the probe and feeds each vector to my_freq.work(),
    # whose result retunes the SDR centre frequency.

    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")

        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000
        self.num_points = num_points = 8192
        self.flag = flag = 1
        self.decimation = decimation = 1
        # Initial tune computed by the embedded frequency module.
        self.center_freq = center_freq = my_freq.work(prob_freq)

        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # Device chosen by index resolved from the configured serial number.
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        # Band-specific RF/IF/BB gain settings as generated.
        self.rtlsdr_source_0.set_gain(100, 0)
        self.rtlsdr_source_0.set_if_gain(100, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)

        def _prob_freq_probe():
            # Poll the probe every 1/poll_rate seconds and push the level
            # through set_prob_freq (which retunes via my_freq.work).
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))

    # --- GRC-generated accessors -------------------------------------------

    def get_prob_freq(self):
        return self.prob_freq

    def set_prob_freq(self, prob_freq):
        # Store the probed vector and retune the SDR via the embedded module.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq))

    def get_top_peaks_amount(self):
        return self.top_peaks_amount

    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount

    def get_samp_rate(self):
        return self.samp_rate

    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)

    def get_poll_rate(self):
        return self.poll_rate

    def set_poll_rate(self, poll_rate):
        # NOTE: the probe thread captured poll_rate at construction; changing
        # it here does not affect the running polling interval.
        self.poll_rate = poll_rate

    def get_num_points(self):
        return self.num_points

    def set_num_points(self, num_points):
        self.num_points = num_points

    def get_flag(self):
        return self.flag

    def set_flag(self, flag):
        self.flag = flag

    def get_decimation(self):
        return self.decimation

    def set_decimation(self, decimation):
        self.decimation = decimation

    def get_center_freq(self):
        return self.center_freq

    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Instantiate and start the flowgraph, print service flags, block until done.

    SIGINT/SIGTERM handlers stop the flowgraph and exit cleanly.
    """
    #for k in range(0, 3):
    #    light_diods_on_boot()
    tb = top_block_cls()

    def sig_handler(sig=None, frame=None):
        tb.stop()
        tb.wait()
        sys.exit(0)

    signal.signal(signal.SIGINT, sig_handler)
    signal.signal(signal.SIGTERM, sig_handler)
    tb.start()
    try:
        # Service info comes from the embedded module's env-driven flags.
        print('СЕРВИСНАЯ ИНФОРМАЦИЯ: ')
        print('debug_flag: ', my_freq.debug_flag)
        print('save_data_flag: ', my_freq.save_data_flag)
        print('send_to_module_flag: ', my_freq.send_to_module_flag)
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '1200')))
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '715')))
    except EOFError:
        pass
    #tb.stop()
    tb.wait()


if __name__ == '__main__':
    main()

@ -0,0 +1,208 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: get_center_freq
# GNU Radio version: 3.8.1.0
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
import embedded_5800 as my_freq # embedded python module
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
# Load the repository-root .env so the hack_* serial variables are available.
load_root_env(__file__)


def get_hack_id():
    """Return the HackRF device index (as a string) for this band's configured serial.

    Resolution is delegated to common.runtime.resolve_hackrf_index.
    FIX: the legacy inline lsusb-parsing code that followed this return was
    unreachable dead code (and, via the shadowed builtin ``id``, would have
    returned ``str(<built-in id>)`` on a missing serial); it has been removed.
    """
    return resolve_hackrf_index('hack_5800', 'src/main_5800.py')
class get_center_freq(gr.top_block):
    # GRC-generated flowgraph (GNU Radio 3.8):
    # HackRF source -> stream_to_vector(4096) -> probe_signal_vc.
    # A daemon thread polls the probe and feeds each vector to my_freq.work(),
    # whose result retunes the SDR centre frequency.

    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")

        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000
        self.num_points = num_points = 8192
        self.flag = flag = 1
        self.decimation = decimation = 1
        # Initial tune computed by the embedded frequency module.
        self.center_freq = center_freq = my_freq.work(prob_freq)

        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # Device chosen by index resolved from the configured serial number.
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        # Band-specific RF/IF/BB gain settings as generated (RF 5, BB 100 here).
        self.rtlsdr_source_0.set_gain(5, 0)
        self.rtlsdr_source_0.set_if_gain(100, 0)
        self.rtlsdr_source_0.set_bb_gain(100, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)

        def _prob_freq_probe():
            # Poll the probe every 1/poll_rate seconds and push the level
            # through set_prob_freq (which retunes via my_freq.work).
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))

    # --- GRC-generated accessors -------------------------------------------

    def get_prob_freq(self):
        return self.prob_freq

    def set_prob_freq(self, prob_freq):
        # Store the probed vector and retune the SDR via the embedded module.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq))

    def get_top_peaks_amount(self):
        return self.top_peaks_amount

    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount

    def get_samp_rate(self):
        return self.samp_rate

    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)

    def get_poll_rate(self):
        return self.poll_rate

    def set_poll_rate(self, poll_rate):
        # NOTE: the probe thread captured poll_rate at construction; changing
        # it here does not affect the running polling interval.
        self.poll_rate = poll_rate

    def get_num_points(self):
        return self.num_points

    def set_num_points(self, num_points):
        self.num_points = num_points

    def get_flag(self):
        return self.flag

    def set_flag(self, flag):
        self.flag = flag

    def get_decimation(self):
        return self.decimation

    def set_decimation(self, decimation):
        self.decimation = decimation

    def get_center_freq(self):
        return self.center_freq

    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Instantiate and start the flowgraph, print service flags, block until done.

    SIGINT/SIGTERM handlers stop the flowgraph and exit cleanly.
    """
    #for k in range(0, 3):
    #    light_diods_on_boot()
    tb = top_block_cls()

    def sig_handler(sig=None, frame=None):
        tb.stop()
        tb.wait()
        sys.exit(0)

    signal.signal(signal.SIGINT, sig_handler)
    signal.signal(signal.SIGTERM, sig_handler)
    tb.start()
    try:
        # Service info comes from the embedded module's env-driven flags.
        print('СЕРВИСНАЯ ИНФОРМАЦИЯ: ')
        print('debug_flag: ', my_freq.debug_flag)
        print('save_data_flag: ', my_freq.save_data_flag)
        print('send_to_module_flag: ', my_freq.send_to_module_flag)
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '1200')))
        #print('multiply_factor: ', float(os.getenv('multiply_factor_' + '715')))
    except EOFError:
        pass
    #tb.stop()
    tb.wait()


if __name__ == '__main__':
    main()

@ -0,0 +1,208 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: get_center_freq
# GNU Radio version: 3.8.1.0
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
import embedded_750 as my_freq # embedded python module
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
# Load the repository-root .env so the hack_* serial variables are available.
load_root_env(__file__)


def get_hack_id():
    """Return the HackRF device index (as a string) for this band's configured serial.

    Resolution is delegated to common.runtime.resolve_hackrf_index.
    FIX: the legacy inline lsusb-parsing code that followed this return was
    unreachable dead code (and, via the shadowed builtin ``id``, would have
    returned ``str(<built-in id>)`` on a missing serial); it has been removed.
    """
    return resolve_hackrf_index('hack_750', 'src/main_750.py')
class get_center_freq(gr.top_block):
    """GNU Radio flowgraph (generated by GRC 3.8.1.0).

    Streams IQ samples from a HackRF (opened through gr-osmosdr) into a
    4096-point signal probe.  A daemon thread polls the probe and feeds each
    reading to ``set_prob_freq``, which retunes the source to whatever
    frequency ``my_freq.work`` returns.
    """

    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")

        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20  # not used in this graph; kept from GRC
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000  # probe polls per second
        self.num_points = num_points = 8192  # not used in this graph; kept from GRC
        self.flag = flag = 1  # not used in this graph; kept from GRC
        self.decimation = decimation = 1  # not used in this graph; kept from GRC
        # Initial tune frequency is produced by the embedded python module.
        self.center_freq = center_freq = my_freq.work(prob_freq)

        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # NOTE(review): the attribute is named rtlsdr_ but this opens a
        # HackRF; get_hack_id() resolves the osmosdr device index by serial.
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        self.rtlsdr_source_0.set_gain(100, 0)
        self.rtlsdr_source_0.set_if_gain(100, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)

        def _prob_freq_probe():
            # Daemon-thread loop: forward every probe reading into
            # set_prob_freq (which retunes the source).
            # NOTE(review): poll_rate is captured at __init__ time, so a
            # later set_poll_rate() does not change this thread's rate.
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))

    # --- GRC-generated variable accessors --------------------------------

    def get_prob_freq(self):
        return self.prob_freq

    def set_prob_freq(self, prob_freq):
        # Storing a new probed value immediately recomputes and applies the
        # center frequency through the embedded module.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq))

    def get_top_peaks_amount(self):
        return self.top_peaks_amount

    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount

    def get_samp_rate(self):
        return self.samp_rate

    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)

    def get_poll_rate(self):
        return self.poll_rate

    def set_poll_rate(self, poll_rate):
        self.poll_rate = poll_rate

    def get_num_points(self):
        return self.num_points

    def set_num_points(self, num_points):
        self.num_points = num_points

    def get_flag(self):
        return self.flag

    def set_flag(self, flag):
        self.flag = flag

    def get_decimation(self):
        return self.decimation

    def set_decimation(self, decimation):
        self.decimation = decimation

    def get_center_freq(self):
        return self.center_freq

    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Build the flowgraph, install SIGINT/SIGTERM handlers and run it."""
    flowgraph = top_block_cls()

    def _shutdown(sig=None, frame=None):
        # Stop the flowgraph cleanly before exiting the process.
        flowgraph.stop()
        flowgraph.wait()
        sys.exit(0)

    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, _shutdown)

    flowgraph.start()
    try:
        print('СЕРВИСНАЯ ИНФОРМАЦИЯ: ')
        print('debug_flag: ', my_freq.debug_flag)
        print('save_data_flag: ', my_freq.save_data_flag)
        print('send_to_module_flag: ', my_freq.send_to_module_flag)
    except EOFError:
        pass
    flowgraph.wait()
# Script entry point: build and run the flowgraph until interrupted.
if __name__ == '__main__':
    main()

@ -0,0 +1,208 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: get_center_freq
# GNU Radio version: 3.8.1.0
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
import embedded_868 as my_freq # embedded python module
import osmosdr
import time
import threading
import subprocess
import os
from common.runtime import load_root_env, resolve_hackrf_index
load_root_env(__file__)
def get_hack_id():
    """Return the osmosdr device index (as a string) for the HackRF whose
    serial number is stored in the ``hack_868`` environment variable.

    Resolution is delegated to ``common.runtime.resolve_hackrf_index``.
    The legacy inline ``lsusb`` parsing that used to follow was dead code
    (it sat after this ``return`` and could also raise NameError on ``id``
    when no device matched), so it has been removed.
    """
    return resolve_hackrf_index('hack_868', 'src/main_868.py')
class get_center_freq(gr.top_block):
    """GNU Radio flowgraph (generated by GRC 3.8.1.0).

    Streams IQ samples from a HackRF (opened through gr-osmosdr) into a
    4096-point signal probe.  A daemon thread polls the probe and feeds each
    reading to ``set_prob_freq``, which retunes the source to whatever
    frequency ``my_freq.work`` returns.
    """

    def __init__(self):
        gr.top_block.__init__(self, "get_center_freq")

        ##################################################
        # Variables
        ##################################################
        self.prob_freq = prob_freq = 0
        self.top_peaks_amount = top_peaks_amount = 20  # not used in this graph; kept from GRC
        self.samp_rate = samp_rate = 20e6
        self.poll_rate = poll_rate = 10000  # probe polls per second
        self.num_points = num_points = 8192  # not used in this graph; kept from GRC
        self.flag = flag = 1  # not used in this graph; kept from GRC
        self.decimation = decimation = 1  # not used in this graph; kept from GRC
        # Initial tune frequency is produced by the embedded python module.
        self.center_freq = center_freq = my_freq.work(prob_freq)

        ##################################################
        # Blocks
        ##################################################
        self.probSigVec = blocks.probe_signal_vc(4096)
        # NOTE(review): the attribute is named rtlsdr_ but this opens a
        # HackRF; get_hack_id() resolves the osmosdr device index by serial.
        self.rtlsdr_source_0 = osmosdr.source(
            args="numchan=" + str(1) + " " + 'hackrf=' + get_hack_id()
        )
        self.rtlsdr_source_0.set_time_unknown_pps(osmosdr.time_spec_t())
        self.rtlsdr_source_0.set_sample_rate(samp_rate)
        self.rtlsdr_source_0.set_center_freq(center_freq, 0)
        self.rtlsdr_source_0.set_freq_corr(0, 0)
        self.rtlsdr_source_0.set_gain(100, 0)
        self.rtlsdr_source_0.set_if_gain(100, 0)
        self.rtlsdr_source_0.set_bb_gain(0, 0)
        self.rtlsdr_source_0.set_antenna('', 0)
        self.rtlsdr_source_0.set_bandwidth(0, 0)
        self.rtlsdr_source_0.set_min_output_buffer(4096)

        def _prob_freq_probe():
            # Daemon-thread loop: forward every probe reading into
            # set_prob_freq (which retunes the source).
            # NOTE(review): poll_rate is captured at __init__ time, so a
            # later set_poll_rate() does not change this thread's rate.
            while True:
                val = self.probSigVec.level()
                try:
                    self.set_prob_freq(val)
                except AttributeError:
                    pass
                time.sleep(1.0 / (poll_rate))
        _prob_freq_thread = threading.Thread(target=_prob_freq_probe)
        _prob_freq_thread.daemon = True
        _prob_freq_thread.start()
        self.blocks_stream_to_vector_1 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 4096)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_vector_1, 0), (self.probSigVec, 0))
        self.connect((self.rtlsdr_source_0, 0), (self.blocks_stream_to_vector_1, 0))

    # --- GRC-generated variable accessors --------------------------------

    def get_prob_freq(self):
        return self.prob_freq

    def set_prob_freq(self, prob_freq):
        # Storing a new probed value immediately recomputes and applies the
        # center frequency through the embedded module.
        self.prob_freq = prob_freq
        self.set_center_freq(my_freq.work(self.prob_freq))

    def get_top_peaks_amount(self):
        return self.top_peaks_amount

    def set_top_peaks_amount(self, top_peaks_amount):
        self.top_peaks_amount = top_peaks_amount

    def get_samp_rate(self):
        return self.samp_rate

    def set_samp_rate(self, samp_rate):
        self.samp_rate = samp_rate
        self.rtlsdr_source_0.set_sample_rate(self.samp_rate)

    def get_poll_rate(self):
        return self.poll_rate

    def set_poll_rate(self, poll_rate):
        self.poll_rate = poll_rate

    def get_num_points(self):
        return self.num_points

    def set_num_points(self, num_points):
        self.num_points = num_points

    def get_flag(self):
        return self.flag

    def set_flag(self, flag):
        self.flag = flag

    def get_decimation(self):
        return self.decimation

    def set_decimation(self, decimation):
        self.decimation = decimation

    def get_center_freq(self):
        return self.center_freq

    def set_center_freq(self, center_freq):
        self.center_freq = center_freq
        self.rtlsdr_source_0.set_center_freq(self.center_freq, 0)
def main(top_block_cls=get_center_freq, options=None):
    """Build the flowgraph, install SIGINT/SIGTERM handlers and run it."""
    flowgraph = top_block_cls()

    def _shutdown(sig=None, frame=None):
        # Stop the flowgraph cleanly before exiting the process.
        flowgraph.stop()
        flowgraph.wait()
        sys.exit(0)

    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, _shutdown)

    flowgraph.start()
    try:
        print('СЕРВИСНАЯ ИНФОРМАЦИЯ: ')
        print('debug_flag: ', my_freq.debug_flag)
        print('save_data_flag: ', my_freq.save_data_flag)
        print('send_to_module_flag: ', my_freq.send_to_module_flag)
    except EOFError:
        pass
    flowgraph.wait()
# Script entry point: build and run the flowgraph until interrupted.
if __name__ == '__main__':
    main()

File diff suppressed because it is too large Load Diff

@ -0,0 +1,500 @@
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import json
import httpx
import asyncio
import requests
import websockets
from copy import deepcopy
from fastapi import FastAPI
from common.runtime import load_root_env, validate_env, as_bool, as_float, as_int, as_str
from datetime import datetime, timedelta
app = FastAPI()

############################################################################
# VARIABLES
############################################################################
load_root_env(__file__)
# Fail fast at startup if any required setting is missing or mistyped.
validate_env("src/server_to_master.py", {
    "lochost": as_str,
    "locport": as_int,
    "jamhost": as_str,
    "jamport": as_int,
    "master_server_ip": as_str,
    "master_server_port": as_int,
    "freqs": as_str,
    "num_of_clear_packs": as_int,
    "threshold_to_alarm": as_int,
    "time_to_jam": as_int,
    "time_to_fresh": as_int,
    "active_interval_to_send": as_int,
    "passive_interval_to_send": as_int,
    "jammer_timeout": as_int,
    "master_timeout": as_int,
    "debug_module_flag": as_bool,
    "send_to_module_flag": as_bool,
    "send_to_master_flag": as_bool,
    "send_to_jammer_flag": as_bool,
    "latitude": as_float,
    "longitude": as_float,
})
# Local bind address plus jam-server and master-server endpoints.
lochost = os.getenv('lochost')
locport = os.getenv('locport')
jamhost = os.getenv('jamhost')
jamport = os.getenv('jamport')
master_server_ip = os.getenv('master_server_ip')
master_server_port = os.getenv('master_server_port')
# Comma-separated list of monitored frequencies, kept as strings.
freqs = [str(x) for x in os.getenv('freqs').split(',')]
num_of_clear_packs = int(os.getenv('num_of_clear_packs'))  # clean packets needed to drop an alarm
threshold_to_alarm = int(os.getenv('threshold_to_alarm'))  # amplitude trigger level
time_to_jam = int(os.getenv('time_to_jam'))
time_to_fresh = int(os.getenv('time_to_fresh'))
active_interval_to_send = int(os.getenv('active_interval_to_send'))  # send period while alarmed, s
passive_interval_to_send = int(os.getenv('passive_interval_to_send'))  # send period while idle, s
jammer_timeout = int(os.getenv('jammer_timeout'))  # seconds to wait for the jam server reply
master_timeout = int(os.getenv('master_timeout'))  # HTTP timeout for posts to the master
debug_module_flag = as_bool(os.getenv('debug_module_flag', '0'))
send_to_module_flag = as_bool(os.getenv('send_to_module_flag', '0'))
send_to_master_flag = as_bool(os.getenv('send_to_master_flag', '0'))
send_to_jammer_flag = as_bool(os.getenv('send_to_jammer_flag', '0'))
latitude = float(os.getenv('latitude'))
longitude = float(os.getenv('longitude'))
# --- Mutable runtime state shared by the coroutines below ----------------
i = 0  # re-entrancy guard for sending_data()
flag = 0  # 0 = post to /data/single, 1 = fall back to /data/bulk
max_len_bulk = 1  # max packets buffered for a bulk retry
bulk_data = []
sending_data_task = None  # asyncio.Task running sending_data(), or None
jam_server_connect = None  # live websocket to the jam server, or None
alarm = False  # True while any frequency is triggered
jammer_event = False  # True while the jammer itself is running
data_queue = [None] * len(freqs)  # one slot per monitored frequency
freqs_alarm = {freq: 0 for freq in freqs}  # per-frequency clean-packet countdown
# TODO:
# 1. Remove `flag` if possible.
# 2. Fix the asyncio.create_task(sending_data) workaround: after a
#    jammer_event the task stops showing signs of life, so it is cancelled
#    when the jammer starts and re-created when the jammer stops.
# 3. Fix the data_queue race: because of the async handlers and the old
#    server design, an alarm packet may be overwritten by a clean packet
#    that arrives while the (slow) send to the master is in progress, so the
#    clean packet is what reaches the master.
# 4. Only print when debug_module_flag is set.
############################################################################
# GPS MODULE - INACTIVE
############################################################################
# Создание планировщика
# scheduler = BackgroundScheduler(daemon=True)
# scheduler.start()
# @app.route('/get_gps', methods = ['POST'])
# @scheduler.scheduled_job(IntervalTrigger(minutes=1))
# def update_gps_coordinates():
# # data_gps = request.json
# result = {
# 'latitude': latitude,
# 'longitude': longitude
# }
# try:
# url = "http://{0}:{1}/data/gps/{2}".format(master_server_ip, master_server_port, mac_address)
# response = requests.post(url, json=result)
# if response.status_code == 200:
# print('gps успешно отправлен')
# else:
# print('gps не был отправлен')
#
# except Exception:
# print('gps не были отправлены из-за отстутствия сервера в поле видимости')
# return result
#
#
# @scheduler.scheduled_job(IntervalTrigger(seconds=10))
# def send_gps_to_master():
# try:
# subprocess.run(["python3", "GPS_get_coords.py"])
# #mac_address = get_mac_address()
# data_gps = update_gps_coordinates()
# url = "http://{0}:{1}/data/gps/{2}".format(master_server_ip, master_server_port, mac_address)
# response = requests.post(url, json=data_gps)
# if response.status_code == 200:
# print('gps успешно отправлен')
# else:
# print('gps не был отправлены по какой-то причине')
# except Exception:
# print('gps не были отправлены по причине отсутствия сервера в поле видимости')
#
############################################################################
# MODULE RIGISTR
############################################################################
def get_mac_address(interface='enp5s0'):
    """Return the MAC address of *interface* on this host, or None.

    Parses ``sudo ifconfig`` output by fixed offsets: the 17-character MAC
    follows the literal 'ether ' token.

    :param interface: network interface name to query.
    """
    try:
        ifconfig_out = os.popen('sudo ifconfig ' + interface).read()
        marker = ifconfig_out.find('ether')
        if marker == -1:
            return None
        # Skip 'ether ' (6 chars) and take the 17-char aa:bb:cc:dd:ee:ff.
        return ifconfig_out[marker + 6:marker + 23]
    except Exception as e:
        print("Ошибка при получении MAC-адреса:" + str(e))
        return None
def get_ip_address(interface='enp5s0'):
    """Return the IPv4 address of *interface* on this host, or None.

    Parses ``sudo ifconfig`` output by fixed offsets after the literal
    'inet ' token; the slice is stripped of trailing whitespace.

    :param interface: network interface name to query.
    """
    try:
        ifconfig_out = os.popen('sudo ifconfig ' + interface).read()
        marker = ifconfig_out.find('inet')
        if marker == -1:
            return None
        # Skip 'inet ' (5 chars) and take up to 14 chars of dotted quad.
        return ifconfig_out[marker + 5:marker + 19].strip()
    except Exception as e:
        print("Ошибка при получении IP-адреса:" + str(e))
        return None
def register_module():
    """Register this module on the master server.

    Posts our MAC and IP to the master's /module/register endpoint.  On any
    request error the module-level ``flag`` is set to 1 so the next data
    transmission falls back to the bulk endpoint.
    """
    global flag  # bugfix: without this, ``flag = 1`` only bound a local name
    data = {'mac': get_mac_address(),
            'ip': get_ip_address()}
    try:
        url = f"http://{master_server_ip}:{master_server_port}/module/register"
        response = requests.post(url, json=data)
        response.raise_for_status()  # raise on non-2xx so we hit the except
        print("Модуль зарегистрирован успешно = ", data)
    except requests.exceptions.RequestException as e:
        flag = 1
        print("Ошибка при регистрации модуля:" + str(e), data)
############################################################################
# SEND DATA TO MASTER
############################################################################
async def send_to_master(ModuleDataSingleV2, flag):
    """Post a data packet to the master server.

    Uses the /data/single endpoint when *flag* == 0 and /data/bulk
    otherwise.  On failure (non-200 reply, network error or timeout) the
    packet is appended to the module-level ``bulk_data`` buffer, bounded by
    ``max_len_bulk``, for a later bulk retry.

    NOTE(review): ``flag`` is a plain parameter, so the ``flag = 0`` /
    ``flag = 1`` assignments below only change the local copy — the
    module-level flag is never updated here.  Confirm whether that is
    intended.

    :param ModuleDataSingleV2: JSON-serialisable data packet.
    :param flag: 0 -> single endpoint, anything else -> bulk endpoint.
    """
    mac_address = get_mac_address()
    async with httpx.AsyncClient() as client:
        try:
            if flag == 0:
                url = f"http://{master_server_ip}:{master_server_port}/data/single/{mac_address}"
            else:
                url = f"http://{master_server_ip}:{master_server_port}/data/bulk/{mac_address}"
            response = await client.post(url, json=ModuleDataSingleV2, timeout=master_timeout)
            if response.status_code == 200:
                print('Данные успешно отправлены')
                flag = 0
                bulk_data.clear()
            else:
                flag = 1
                if len(bulk_data) > max_len_bulk:  # drop the oldest packet once the bulk limit is exceeded
                    bulk_data.pop(0)
                bulk_data.append(ModuleDataSingleV2)
                print('Данные не были отправлены по какой-то причине')
        except (httpx.RequestError, asyncio.TimeoutError) as e:
            if len(bulk_data) > max_len_bulk:  # drop the oldest packet once the bulk limit is exceeded
                bulk_data.pop(0)
            bulk_data.append(ModuleDataSingleV2)
            flag = 1
            print('Данные не были отправлены по причине отсутствия сервера в поле видимости')
############################################################################
# PROCESS DATA
############################################################################
async def check_alarm(amplitude: int):
    """Return True when *amplitude* exceeds the system trigger threshold.

    :param amplitude: measured amplitude.
    :return: bool — above the threshold or not.
    """
    return amplitude > threshold_to_alarm
async def agregate_data(data_to_agregate: list):
    """Assemble one packet for the master server.

    :param data_to_agregate: one entry per monitored frequency; an entry is
        None when that frequency has not reported anything this cycle.
    :return: dict with a ``registeredAt`` timestamp and the collected
        ``data`` list; the module-level ``data_queue`` is reset afterwards.
    """
    data = []
    # The any() guard is redundant with the per-item None check inside the
    # loop, but it is kept as written.
    if any(item is not None for item in data_to_agregate):
        for item in data_to_agregate:
            if item is not None:
                item['freq'] = int(item['freq'])
                data.append(item)
    # Timestamp is shifted 2 s into the past — presumably to compensate for
    # detection/pipeline lag; TODO confirm why the offset exists.
    now = datetime.utcnow() - timedelta(seconds=2)
    now = now.strftime("%Y-%m-%d %H:%M:%S")
    data = {
        "registeredAt": now,
        "data": data
    }
    # Clear the queue so the next cycle starts from a blank slate.
    for i in range(len(freqs)):
        data_queue[i] = None
    return data
async def sending_data():
    # TODO: this function really needs a proper rewrite.
    """Periodically push the aggregated packet to the master server.

    The send period depends on the alarm state: ``passive_interval_to_send``
    seconds while idle, ``active_interval_to_send`` while alarmed.  The
    module-level ``i`` acts as a re-entrancy guard so only one loop runs.

    NOTE(review): the countdown ``for i in range(...)`` below rebinds the
    *global* ``i`` (declared global here), clobbering the guard value while
    counting down — confirm this is intended.
    """
    global i
    global alarm
    global jammer_event
    if i == 0:
        while True:
            i=1
            print('while true!')
            ModuleDataSingleV2 = await agregate_data(deepcopy(data_queue))
            if send_to_master_flag:
                print(f'На Мастер будет отправлена следующая информация: {ModuleDataSingleV2}')
                await send_to_master(ModuleDataSingleV2, flag)
            # If everything was clean before the send, wait out the passive
            # interval, aborting the countdown as soon as an alarm arrives.
            if not alarm:
                for i in range(passive_interval_to_send, 0, -1):
                    print('ТАЙМЕР ', i)
                    await asyncio.sleep(1)
                    if alarm:
                        break
            # Alarmed, forwarding enabled, jammer not yet running: notify
            # the jam service.
            elif alarm and send_to_jammer_flag and not jammer_event:
                if await send_jam_server_alarm():
                    print('Отправили на сервис подавления и все дошло успешно')
                else:
                    print('Не смогли отправить на сервис подавления')
            # Reportedly never reached — the task goes silent after the
            # jammer starts (see module-level TODO #2).
            if alarm and jammer_event:
                print('ПОДАВИТЕЛЬ РАБОТАЕТ РАЗБЕГАЙСЯ ААААААААААААААААААА')
            # While alarmed, wait only the short active interval.
            if alarm:
                await asyncio.sleep(active_interval_to_send)
            i = 0
@app.post('/waterfall')
async def waterfall(data: dict):
    """Accept a waterfall payload from a detector; currently only logged."""
    print('Received data: ', data)
@app.post('/process_data')
async def process_data(data: dict):
    """Receive a detection packet from a scanner script.

    Expected format: data = {"freq": freq,
                             "amplitude": amplitude
                             }
    where freq is a str and amplitude an int.  The packet is slotted into
    ``data_queue`` at the position reserved for its frequency, and the
    alarm / clean-packet bookkeeping is updated.
    :param data: dict with the two keys above.
    """
    global alarm
    print('Received data: ', data)
    data_dict = deepcopy(data)
    # Aggregate the per-frequency packets into one shared list (consumed by
    # agregate_data); each list position is pinned to one frequency.
    freq = data_dict['freq']
    for i in range(len(freqs)):
        if freq == freqs[i]:
            # This server is the decision point for triggering.
            trigger = await check_alarm(data_dict['amplitude'])
            data_dict.update({'triggered': trigger})
            data_queue[i] = deepcopy(data_dict)
            data_dict.clear()
            # Trigger while the jammer is off: (re)arm the clean-packet
            # countdown for this frequency.
            if trigger and not jammer_event:
                freqs_alarm[freq] = num_of_clear_packs
                print(f'freqs_alarm выглядит следующим обазом: {freqs_alarm}')
            # Trigger while the module is not yet alarmed: raise the alarm.
            if trigger and not alarm:
                print('Приелет триггерa со сканнера. Работаем, ребята!')
                alarm = True
            # Clean packet while alarmed (jammer off): count this frequency
            # down; once every counter hits zero, clear the alarm.
            elif not trigger and alarm and not jammer_event and freqs_alarm[freq] != 0:
                freqs_alarm[freq] -= 1
                print(f'Чистый пакет. Уменьшаем в выбранной частоте: {freqs_alarm}')
                if all(value == 0 for value in freqs_alarm.values()):
                    alarm = False
                    print(f'Прилетело {num_of_clear_packs}. Отключаем аларм и freqs_alarm выглядит так: {freqs_alarm}')
        else:
            continue
    print('После получения данных data_queue выглядит следующим образом: ', data_queue)
############################################################################
# JAMMER
############################################################################
async def jammer_active():
    """Jammer switched on.

    Cancels the master-sender task, zeroes the clean-packet counters and
    records that a jammer event is in progress.
    """
    global jammer_event
    global freqs_alarm
    global sending_data_task
    if sending_data_task is not None:
        sending_data_task.cancel()
    freqs_alarm = dict.fromkeys(freqs, 0)
    jammer_event = True
    for line in ('АКТИВИРУЕМ ПОДАВИТЕЛЬ ААААААААААААААААААААААААААААААААААААААААААААААА!!!!',
                 '-' * 20,
                 'Статус по переменным:',
                 f'freqs_alarm: {freqs_alarm}',
                 f'jammer_event: {jammer_event}',
                 f'alarm: {alarm}',
                 '-' * 20):
        print(line)
async def jammer_deactive():
    """Jammer switched off.

    Clears the module alarm, drops the jammer event and restarts the
    master-sender task.
    :return: None
    """
    global jammer_event
    global alarm
    global sending_data_task
    alarm = False
    jammer_event = False
    sending_data_task = asyncio.create_task(sending_data())
    for line in ('ОТКЛЮАЕМ ПОДАВИТЕЛЬ ААААААААААААААААААААААААААААААААААААААААААААААААА!!!!',
                 '-' * 20,
                 'Статус по переменным:',
                 f'freqs_alarm: {freqs_alarm}',
                 f'jammer_event: {jammer_event}',
                 f'alarm: {alarm}',
                 '-' * 20):
        print(line)
async def send_jam_server_alarm():
    """Send an alarm packet to the jam server over its websocket.

    Waits up to ``jammer_timeout`` seconds for the acknowledgement.
    Returns False when there is no connection or the send/ack fails.
    """
    global jam_server_connect
    if not jam_server_connect:
        return False
    payload = json.dumps({'type': 'freq_alarm',
                          'data': True})
    try:
        await jam_server_connect.send(payload)
        await asyncio.wait_for(jam_server_connect.recv(), jammer_timeout)
        return True
    except (asyncio.TimeoutError, websockets.exceptions.WebSocketException) as e:
        print(f"WebSocket error or timeout: {e}")
        return False
async def jam_server():
    """Maintain a websocket connection to the jam server and react to it.

    Receives 'run' messages and switches the jammer state on/off.  On any
    error the connection is dropped, the jammer is force-deactivated if it
    was running, and the connection is retried after a short delay.

    Bugfix: the reconnect loop previously retried immediately, busy-spinning
    and pegging a CPU core for as long as the jam server was unreachable; a
    one-second backoff is now applied before each retry.
    """
    uri = f'ws://{jamhost}:{jamport}/ws'
    global jam_server_connect
    while True:
        try:
            jam_server_connect = await websockets.connect(uri)
            while True:
                raw = await jam_server_connect.recv()
                message = json.loads(raw)
                print('Принял с сервера глушилок: ', message)
                if message['type'] == 'run':
                    alarm_status = (message['data'])['state']
                    print(alarm_status)
                    if alarm_status:
                        await jammer_active()
                    else:
                        await jammer_deactive()
        except Exception as e:
            jam_server_connect = None
            if jammer_event:
                # Connection lost while jamming: force-deactivate.
                await jammer_deactive()
            await asyncio.sleep(1)  # backoff: avoid a busy reconnect loop
@app.on_event("startup")
async def startup_event():
"""
Запускаем параллельно задачи jam_server и sending_data.
"""
global sending_data_task
asyncio.create_task(jam_server())
sending_data_task = asyncio.create_task(sending_data())
# Script entry point: register on the master, then serve the FastAPI app.
if __name__ == '__main__':
    import uvicorn
    # update_gps_coordinates()
    register_module()  # Register this module on the master server
    uvicorn.run(app, host=lochost, port=int(locport))

@ -0,0 +1,314 @@
import json
import os
import asyncio
import websockets
from common.runtime import load_root_env
from typing import List
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, HTTPException
load_root_env(__file__)
# Alarm threshold, timing knobs and the local / jam-server endpoints, all
# supplied through the root .env file.
threshold_to_alarm = int(os.getenv('threshold_to_alarm'))  # amplitude trigger level
time_to_jam = int(os.getenv('time_to_jam'))
time_to_fresh = int(os.getenv('time_to_fresh'))
lochost = os.getenv('lochost')  # address this FastAPI app binds to
locport = int(os.getenv('locport'))
jamhost = os.getenv('jamhost')  # jam-server websocket host
jamport = os.getenv('jamport')
# TODO: add handling for the button and the waterfall.
class FreqConfig:
    """Per-band on/off configuration displayed on the tablets.

    Attributes:
        freq_config: Mapping of frequency label -> enabled flag.
    """

    def __init__(self):
        # Keys in display order; the parallel tuple holds the default state.
        bands = ('433', '500', '700', '868', '915',
                 '1200', '1500', '2400', '5200', '5800')
        presets = (False, False, True, True, True,
                   True, False, True, True, True)
        self.freq_config = dict(zip(bands, presets))

    def get(self):
        """Return the whole frequency -> state mapping."""
        return self.freq_config

    def get_status(self, freq):
        """Return whether *freq* is currently enabled.

        :param freq: frequency label to look up.
        :return: True when enabled, False when disabled.
        """
        return self.freq_config[freq]

    def set_active(self, freq: str, status: bool):
        """Switch *freq* on or off.

        :param freq: frequency label to toggle.
        :param status: target state (True = on, False = off).
        :return: None.
        """
        self.freq_config[freq] = status
app = FastAPI()
# All currently connected tablet websockets; broadcast targets.
websocket_connections: List[WebSocket] = []
jam_server_connect = None  # live websocket to the jam server, or None
alarm = False  # True while the jammer is running
freqconfig = FreqConfig()
def check_active_tablets() -> None:
    """Raise HTTP 400 when no tablet websocket is connected."""
    if websocket_connections:
        return
    raise HTTPException(status_code=400, detail="No active WebSocket connections = No tablets in sight.")
async def send_to_tablets(package) -> None:
    """Broadcast *package* to every connected tablet websocket.

    Failures for individual clients are logged and skipped so one broken
    connection does not block the rest.
    :param package: assembled data packet to send.
    """
    if not websocket_connections:
        print('Нет подключенных планшетов/клиентов.')
        return
    for client_ws in websocket_connections:
        try:
            print('Пытаемся отправить данные клиенту ', client_ws)
            await client_ws.send_json(package)
            print(f'Пакет {package} успешно отправлен.')
        except Exception as e:
            print(f"Не смогли отправить данные клиенту по вебсокету: {e}")
async def check_alarm(amplitude: int):
    """Forward an alarm to the jam server when *amplitude* crosses the
    threshold; return whether the forward succeeded (False when below)."""
    if amplitude <= threshold_to_alarm:
        return False
    return await send_jam_server_alarm()
# async def freq_active(freq: str):
# """
# Запуск скрина с частотой после ее активации в частотном конфиге.
# :param freq: Частота для запуска скрина.
# """
#
# # TODO добавить чек частоты в скринах, а то вдруг мы запускаем скрин, а он уже запущен.
# print(f'АКТИВИРУЕМ ЧАСТОТУ {freq}')
# command = f"screen -dmS {freq} sh -c \"export PYTHONPATH=/home/orangepi && bash -c \'python3 " \
# f"/home/orangepi/DroneScanner/src/main_5800.py\' && exec bash\""
# screen = subprocess.run(command, shell=True, capture_output=True, text=True)
#
# # Проверяем результат выполнения
# if screen.returncode == 0:
# print("Команда успешно выполнена")
# else:
# print("Ошибка при выполнении команды")
# print("STDERR:", screen.stderr)
#
#
# async def freq_deactive(freq: str):
# """
# Килл скрин с частотой после ее деактивации в частотном конфиге.
# :param freq: Частота для скрин килл.
# """
#
# print(f'ОТКЛЮЧАЕМ ЧАСТОТУ {freq}')
@app.post('/send-waterfall/')
async def send_waterfall(data: dict) -> None:
    """Accept a waterfall packet and relay it to the tablets when needed.

    Currently a stub: the payload is ignored and a placeholder is printed.
    :param data: waterfall packet of the form data = {...}
    """
    print('bubble tea!')
async def set_freq_config(data: dict):
    """Apply per-frequency on/off switches received from one tablet and
    broadcast each change to all connected tablets.

    :param data: mapping of {frequency label: desired state}.
    :return: None.
    """
    for freq, state in data.items():
        freqconfig.set_active(freq, state)
        print(f'Частота {freq} перешла в состояние {state}: {freqconfig.get_status(freq)}')
        await send_to_tablets({'type': 'freq_config',
                               'data': {freq: state}})
@app.post("/process_data")
async def send_data(data: dict):
"""
Прием данных со скриптов детекции в формате data = {"freq": freq,
"amplitude": amplitude
}
где freq - строка, amplitude - int, их обработка и рассылка по планшетам.
:param data: Словарь.
"""
global alarm
global jam_server_connect
# check_active_tablets()
print('На сервер пришли данные: ', data)
if alarm and jam_server_connect:
print('Подавитель активен.')
return {'message': 'Подавитель активен.'}
elif alarm and jam_server_connect is None:
alarm = False
if not freqconfig.get_status(data['freq']):
print('Частота выключена.')
return {'message': 'Частота выключена.'}
if await check_alarm(data['amplitude']):
print('Затриггерились')
msg = {'type': 'freq',
'data': data
}
await send_to_tablets(msg)
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket) -> None:
"""
Прием данных (freq config) по вебсокету от клиента (планшетов) и их обработка.
При подключении к серверу - отсылка на клиента текущее состояние частотного конфига.
:param websocket:
:return:
"""
await websocket.accept()
websocket_connections.append(websocket)
msg = {'type': 'freq_config',
'data': freqconfig.get()
}
await websocket.send_json(msg)
try:
while True:
try:
data_from_client = await websocket.receive_json() # Ожидание получения данных от клиента
if 'freq' in data_from_client:
data = data_from_client['freq']
print('Приняли с планшета: ', data)
await set_freq_config(data)
except WebSocketDisconnect:
print("Client disconnected")
websocket_connections.remove(websocket)
break
except Exception as e:
print(f"Error receiving data: {e}")
except WebSocketDisconnect:
websocket_connections.remove(websocket)
# print(e)
# print(f"Client disconnected: {e.code} - {e.reason}")
##################################################################################
# Подключение к серверу глушилок, как клиент по вебсокету и обработка информации.
##################################################################################
async def jammer_active():
    """Switch the jammer state on (suppresses further alarm forwarding)."""
    global alarm
    alarm = True
    print('АКТИВИРУЕМ ПОДАВИТЕЛЬ!!!!')
async def jammer_deactive():
    """Switch the jammer state off."""
    global alarm
    alarm = False
    print('ОТКЛЮАЕМ ПОДАВИТЕЛЬ!!!!')
async def send_jam_server_alarm():
    """Send a trigger packet to the jam server.

    :return: True when the connection is up and the send succeeded,
        False otherwise.

    Bugfix: the send is now guarded — previously a broken socket raised a
    WebSocketException straight out of the /process_data endpoint.  This
    mirrors the guarded sibling handler in the module server.
    """
    global jam_server_connect
    msg = {'type': 'freq_alarm',
           'data': True}
    if not jam_server_connect:
        return False
    try:
        await jam_server_connect.send(json.dumps(msg))
        return True
    except websockets.exceptions.WebSocketException as e:
        print(f"WebSocket error: {e}")
        return False
async def jam_server():
    """Connect to the jam server websocket as a client and react to it.

    Receives 'run' messages and activates/deactivates the jammer state.  On
    any error the connection is dropped, the jammer is force-deactivated if
    it was running, and the connection is retried after a short delay.

    Bugfix: the reconnect loop previously retried immediately, busy-spinning
    and pegging a CPU core for as long as the jam server was unreachable; a
    one-second backoff is now applied before each retry.
    """
    uri = f'ws://{jamhost}:{jamport}/ws'
    global jam_server_connect
    while True:
        try:
            jam_server_connect = await websockets.connect(uri)
            while True:
                raw = await jam_server_connect.recv()
                message = json.loads(raw)
                print('Принял с сервера глушилок: ', message)
                if message['type'] == 'run':
                    alarm_status = (message['data'])['state']
                    print(alarm_status)
                    if alarm_status:
                        await jammer_active()
                    else:
                        await jammer_deactive()
        except Exception as e:
            jam_server_connect = None
            if alarm:
                # Connection lost while jamming: force-deactivate.
                await jammer_deactive()
            await asyncio.sleep(1)  # backoff: avoid a busy reconnect loop
@app.on_event("startup")
async def startup_event():
"""
Запуск подключения к серверу подавления и получения от него сообщений.
"""
asyncio.create_task(jam_server())
##################################################################################
# Запуск приложения.
##################################################################################
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host=lochost, port=locport)

@ -0,0 +1,108 @@
import os
import datetime
from smb.SMBConnection import SMBConnection
from dotenv import load_dotenv
from DroneScanner.utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from DroneScanner.core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from DroneScanner.core.multichannelswitcher import MultiChannel, get_centre_freq
# Load configuration from the repository-level .env file, if present.
dotenv_path = os.path.join(os.path.dirname(__file__), '../../.env')
if os.path.exists(dotenv_path):
    load_dotenv(dotenv_path)


def _env_flag(name):
    """Interpret an environment variable as a boolean flag.

    The previous code used bool(os.getenv(...)), which is True for ANY
    non-empty string -- including 'False' and '0'.  Unset variables are
    still False and truthy spellings are still True; only the common
    falsy spellings change (they are now correctly False).
    """
    value = os.getenv(name)
    if value is None:
        return False
    return value.strip().lower() not in ('', '0', 'false', 'no', 'off')


# Feature switches (read from .env).
debug_flag = _env_flag('debug_flag')
send_to_module_flag = _env_flag('send_to_module_flag')
save_data_flag = _env_flag('save_data_flag')
# Identity / export settings.
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')
# Frequency plan for the 1200 MHz band: space-separated floats in .env.
f_step = [*map(float, os.getenv('f_step_1200').split())]
f_bases = [*map(float, os.getenv('f_bases_1200').split())]
f_roofs = [*map(float, os.getenv('f_roofs_1200').split())]
# Output paths and SMB share credentials.
path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')  # NOTE(review): read but unused -- connect() below hard-codes port 139; confirm intent.
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')
# Scanner state shared by work() below.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()
# In debug mode open the SMB connection used later to upload alarm signals.
if debug_flag:
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """
    GNU Radio hook for the 1200 MHz scanner: process one block of samples
    on the current channel and advance the channel switcher.

    Flow: samples are buffered by tmp_signal until a full window for the
    current centre frequency is collected (fill_sig returns -1 until then);
    the per-channel medians are then compared against the circular-buffer
    thresholds (alarm check), optionally forwarded to the local module
    and/or persisted via save_data, and in debug mode the raw signal of a
    single-channel alarm is uploaded to the SMB share.

    Arguments:
        lvl - presumably a block of IQ samples from the calling flowgraph
              block -- TODO confirm at the GRC block that imports this.
    Returns:
        the (possibly switched) channel value from multi_channel.
    """
    f = multi_channel.get_cur_channel()
    freq = get_centre_freq(f)
    signal_length = get_signal_length(freq)
    # median == -1 means the window is not full yet; do nothing this call.
    median, signal, abs_signal = tmp_signal.fill_sig(lvl, signal_length)
    if median != -1:
        try:
            num_chs, circle_buffer = multi_channel.check_f(f)
            #print(f, freq, num_chs, signal_length)
            #print(median)
            cur_channel, sigs_array = tmp_sigs_array.fill_sig_arr(median, num_chs)
            if sigs_array:
                print('Значения на {0}: {1}'.format(freq, sigs_array))
                print('Пороги: ', circle_buffer.get_medians())
                # Alarm branch freezes the thresholds; otherwise they keep adapting.
                alarm = circle_buffer.check_alarm(sigs_array)
                if alarm:
                    print('----ALARM---- ', freq)
                    multi_channel.db_alarms_zeros(circle_buffer)
                else:
                    circle_buffer.update(sigs_array)
                if send_to_module_flag:
                    send_data(agregator(freq, alarm), localhost, localport)
                if save_data_flag:
                    # First write after init emits a header row, then data rows.
                    if not circle_buffer.check_init() and circle_buffer.current_column - 1 == 0:
                        save_data(path_to_save_medians, freq, 'DateTime', 'ALARM', 'max signal', list(range(num_chs)),
                                  list(range(num_chs)))
                    if circle_buffer.check_init():
                        save_data(path_to_save_medians, freq, datetime.datetime.now(), alarm, max(sigs_array), sigs_array,
                                  circle_buffer.get_medians())
                # print(circle_buffer.get_buffer())
                # print(circle_buffer.get_medians())
                # print(circle_buffer.get_alarms())
                if debug_flag:
                    # Debug path: upload the raw signal of a single-channel alarm over SMB.
                    single_alarm = circle_buffer.check_single_alarm(median, cur_channel)
                    print(cur_channel, single_alarm)
                    if single_alarm:
                        data = pack_elems(elems_to_save, file_types_to_save, signal, abs_signal)
                        #print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                        try:
                            remote_save_data(conn, data, module_name, freq, shared_folder, path_to_save_alarms)
                        except Exception as e:
                            print(f"Ошибка: {e}")
                        #else:
                            #print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
            f = multi_channel.change_channel()
        except Exception as e:
            # NOTE(review): broad catch keeps the flowgraph alive but hides bugs.
            print(str(e))
            print(".", end='')
    return f

@ -0,0 +1,276 @@
# this module will be imported in the into your flowgraph
import numpy as np
import os
import platform
from rknnlite.api import RKNNLite
import wiringpi as wpi
from wiringpi import GPIO
import requests
import json
from dotenv import load_dotenv
# Load runtime configuration from the repository-level .env file.
dotenv_path = os.path.join(os.path.dirname(__file__), '../../.env')
load_dotenv(dotenv_path)
##############################
# wiringPi
##############################
# Initialise the wiringPi GPIO library before any pin access.
wpi.wiringPiSetup()
##############################
# RKNN
##############################
# Device-tree node for SoC identification (not read in this chunk).
DEVICE_COMPATIBLE_NODE = '/proc/device-tree/compatible'
# Classifier bundle: RKNN model handle plus its inference hyper-parameters.
RK3588_MODEL = {
    'path': os.getenv('path_to_NN'),  # path to the .rknn file (from .env)
    'model': None,  # filled with an RKNNLite instance below
    'split_size': 50_000,  # samples per inference window
    'N_predictions': 40,  # windows (votes) accumulated before a decision
    'N_samples_confidence_threshold': 0.65,  # fraction of votes required
}
RK3588_MODEL['model'] = RKNNLite()
try:
    ret1 = RK3588_MODEL['model'].load_rknn(RK3588_MODEL['path'])
    assert(ret1 == 0)
# NOTE(review): a failed model load is only printed, and init_runtime()
# below still runs (and asserts) -- consider failing fast here instead.
except Exception as e:
    print(e)
# Pin inference to NPU core 0.
ret2 = RK3588_MODEL['model'].init_runtime(core_mask=RKNNLite.NPU_CORE_0)
assert(ret2 == 0)
##############################
# HYPERPARAMETERS
##############################
# Frequency sweep: start at 2.48 GHz and step DOWN by 10 MHz to 2.4 GHz.
f_base = 2.48e9
f_step = -10e6
f_roof = 2.4e9
signal_num = 10
#signal_dir = '/home/orangepi/CDD/Complex_DroneDetection/gnuradio/signal/'
#if not os.path.exists(signal_dir):
#    os.mkdir(signal_dir)
reading_signal_delay = 0
iterations = 3  # read signal iterations
height_threshold = 100
weak_avg_amount = 70
weak_samples_confidence = 0.50
# Class indices produced by the RKNN classifier.
#classes = {0: 'noise', 1: 'DJI', 2: 'other', 3: 'for weak'}
classes = {0: 'noise', 1: 'DJI_video', 2: 'DJI_control', 3: 'WIFI'}
pins = [11, 4, 3, 14, 12, 0, 1, 2, 5, 7]
on_state = 0
off_state = 1
# Ascending peak-to-peak thresholds used by send_data() to map a signal
# distance to an indicator length.  BUGFIX: the fourth entry was 0.95,
# which broke the strictly ascending order (0.08 -> 0.95 -> 0.115); it is
# the decimal-point typo for 0.095.
p2p_border = np.array([0.05, 0.065, 0.08, 0.095, 0.115, 0.125, 0.135, 0.185, 0.25, 0.3])
# Amplitude bins and per-bin confidence-threshold decays (the decays are
# currently disabled in work(): amp_decay is hard-coded to 0 there).
amp_border = np.array([0, 0.06, 0.07, 0.075, 0.08, 0.085, 0.09, 0.095, 0.10, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.20])
#amp_decays = np.array([0, -0.15, -0.14, -0.13, -0.12, -0.11, -0.10,-0.09,-0.08,-0.07,-0.06,-0.05,-0.04,-0.03,-0.02,-0.01, -0.009, -0.008, -0.007])
amp_decays = np.array([0, -0.07, -0.0675, -0.065, -0.0625, -0.06, -0.05,-0.045,-0.04,-0.035,-0.03,-0.025,-0.02,-0.015,-0.01,-0.008, -0.004, -0.002, -0.001])
assert(len(amp_border) == len(amp_decays))
amp_slice_size = 50000  # number of largest |samples| averaged for the running amplitude
pred_amps = []
##############################
# Variables
##############################
# Mutable scan state shared with work() below (module-level globals).
running_amp = 0
weak_detected = 0
counter = 0
ctrs = {0: 0, 1: 0, 2: 0, 3: 0}  # per-class vote counters for the current frequency
avg_probs = {0: 0., 1: 0., 2: 0., 3: 0.}  # per-class probability sums (averaged at decision time)
avg_amps = {0: 0., 1: 0., 2: 0., 3: 0.}  # per-class running-amplitude sums
max_amp = 0
max_freq = 0
it = 0 # current reading flag
f = f_base # local frequency
EOCF = 0 # End of changing frequency flag
signal_arr = []  # accumulated IQ samples for the current inference window
weak_avg_ctr = 0
weak_ctr = 0  # inference windows processed at the current frequency
avg_confidence = 0
strong_confidence_threshold = 0.6
weak_confidence_threshold = 0.85  # min per-window probability for a vote to count
current_pin = 0
vals = []  # per-frequency detection amplitudes collected over one sweep
############################### support functions
##############################
def calc_running_amp(sig, slice_size=None):
    """
    Average magnitude of the strongest samples in `sig`.

    Arguments:
        sig - array of (possibly complex) samples; flattened before use.
        slice_size - how many of the largest |sample| values to average.
            New optional parameter (backward compatible): defaults to the
            module-level `amp_slice_size`.
    Returns:
        numpy scalar - mean of the `slice_size` largest magnitudes.
    """
    # The old `global amp_slice_size` declaration was unnecessary: the name
    # was only read, never assigned.
    if slice_size is None:
        slice_size = amp_slice_size
    # Sort magnitudes descending and average the top slice.
    return np.average(np.sort(np.abs(sig).flatten())[::-1][:slice_size])
def softmax(x):
    """Map raw scores to probabilities that sum to 1 (numerically stable:
    the maximum is subtracted before exponentiation)."""
    shifted = x - np.max(x)
    exps = np.exp(shifted)
    return exps / np.sum(exps)
def compute_signal_distance(signal: np.array) -> float:
    """
    Peak-to-peak distance of the input signal.

    Arguments:
        signal, np.array - an array of complex points of the input signal
    Returns:
        float - magnitude of (max element - min element).  NOTE: for
        complex input NumPy orders elements lexicographically (real part
        first, then imaginary).
    """
    peak_to_peak = np.max(signal) - np.min(signal)
    return np.abs(peak_to_peak)
def send_data(sig_dist):
    """
    Convert a peak-to-peak signal distance into an indicator length and
    POST it to the local control service.

    Arguments:
        sig_dist, float - peak-to-peak amplitude distance (the per-sweep
        maximum passed in from work()).
    """
    localhost = os.getenv('lochost')
    localport = os.getenv('locport')
    # Indicator length = number of ascending thresholds at or below sig_dist.
    length = int(np.sum(np.where(p2p_border <= sig_dist, 1, 0)))
    # The unused locals `len_threshold` / `trigger` were removed; they only
    # fed the commented-out payload fields kept below for reference.
    data_to_send = {
        "freq": '2400',
        "amplitude": length
        # "triggered": length >= int(os.getenv('len_threshold')),
        # "light_len": length
    }
    response = requests.post("http://{0}:{1}/send-freq/".format(localhost, localport), json=data_to_send)
    if response.status_code == 200:
        print("Данные успешно отправлены и приняты!")
    else:
        print("Ошибка при отправке данных: ", response.status_code)
##############################
# main function
##############################
def work(lvl):
    """
    GNU Radio hook: consume one block of samples, run the RKNN classifier
    and drive the frequency sweep.

    Samples accumulate in `signal_arr` until a full inference window
    (`split_size` samples) is available; each window yields one class
    vote.  After `N_predictions` windows the votes are tallied: a winning
    non-noise class with enough votes and a high enough average
    probability appends its average amplitude to `vals` (else 0) and the
    sweep steps to the next frequency.  When the sweep passes `f_roof` it
    wraps to `f_base` and the per-sweep maximum of `vals` is reported via
    send_data().

    Arguments:
        lvl - presumably an array of complex IQ samples from the
              flowgraph -- TODO confirm at the GRC block that calls this.
    Returns:
        (f, EOCF) - next centre frequency and the end-of-change flag.
    """
    # Only names actually read or assigned here are declared; the original
    # also declared over a dozen never-used globals (signal_tag,
    # strong_model, weak_model, max_amp, it, ...), removed in this revision.
    global f_base
    global f_step
    global f_roof
    global f
    global EOCF
    global weak_ctr
    global weak_avg_ctr
    global weak_confidence_threshold
    global signal_arr
    global ctrs
    global avg_probs
    global avg_amps
    global classes
    global vals
    outputs = []
    y = np.array(lvl).ravel()
    signal_arr = np.concatenate((signal_arr, y), axis=None)
    if f <= f_roof:
        # Sweep finished: wrap to the base frequency and report the
        # strongest detection amplitude seen during this sweep.
        f = f_base
        signal_arr = []
        send_data(np.max(np.array(vals)))
        vals = []
        return f, EOCF
    else:
        label = None
        if len(signal_arr) >= RK3588_MODEL['split_size']:  # the signal length `soft` constraint
            # Pack the window as a (2, split_size) float32 array: real and
            # imaginary parts as separate channels.
            sig = np.array([signal_arr.real[0:RK3588_MODEL['split_size']], signal_arr.imag[0:RK3588_MODEL['split_size']]], dtype=np.float32)
            running_amp = calc_running_amp(sig)
            # feeds the input signal into weak classifier
            outputs = RK3588_MODEL['model'].inference(inputs=[sig])
            signal_arr = []
            label = np.argmax(outputs, axis=2)[0][0]
            probability = softmax(outputs[0][0])[label]
            #print(classes[label], round(probability, 2), int(f))
            weak_ctr += 1
            # Count a vote only for confident, non-noise, non-WIFI windows.
            if (label != 0) and (label != 3) and probability > weak_confidence_threshold:
                weak_avg_ctr += 1
                ctrs[label] += 1
                avg_probs[label] += probability
                avg_amps[label] += running_amp
            if weak_ctr == RK3588_MODEL['N_predictions']:
                # Decision point: turn the accumulated sums into averages.
                prob = round(softmax(outputs[0][0])[label], 2)
                #print('Detected: ', classes[label], ' Probability: ' , prob, 'Frequency: ', str(f))
                print('---------> Frequency: ', f)
                for key in avg_probs.keys():
                    avg_probs[key] = float(avg_probs[key] / max(1, ctrs[key]))
                    avg_amps[key] = float(avg_amps[key] / max(1, ctrs[key]))
                for key in ctrs.keys():
                    print("avg prob: ", "%.4f" % avg_probs[key],"avg_amp: ", "%.4f" % avg_amps[key],"ctr: ", ctrs[key],"class: ", classes[key])
                #sfm = softmax(list(avg_probs.values()))
                #print(sfm)
                # The winner is the class with the most votes.
                label = np.argmax(list(ctrs.values()))
                weak_avg_ctr = ctrs[label]
                weak_avg_prob = avg_probs[label] # / max(1, ctrs[label])
                weak_avg_amp = avg_amps[label]
                amp_decay = 0 #amp_decays[min(np.sum(np.where(amp_border <= weak_avg_amp, 1, 0)), len(amp_border) - 1)]
                final_confidence_threshold = weak_confidence_threshold + amp_decay
                print('-' * 50, classes[label], weak_avg_ctr,"%.4f" % float(weak_avg_prob), '/', "%.4f" % final_confidence_threshold)
                #print(classes[label] , ': ', sfm[label])
                # Reset the accumulators for the next frequency.
                avg_probs = {0: 0., 1: 0., 2: 0., 3: 0.}
                avg_amps = {0: 0., 1: 0., 2: 0., 3: 0.}
                ctrs = {0: 0, 1: 0, 2: 0, 3: 0}
                if weak_avg_ctr >= RK3588_MODEL['N_predictions'] * RK3588_MODEL['N_samples_confidence_threshold'] and label != 0 and weak_avg_prob > final_confidence_threshold:
                    print('!!!' * 30, f)
                    vals.append(weak_avg_amp)
                else:
                    vals.append(0)
                # Step to the next frequency in the sweep.
                f += f_step
                weak_ctr = 0
                weak_avg_ctr = 0
        return f, EOCF

@ -0,0 +1,109 @@
import os
import datetime
from smb.SMBConnection import SMBConnection
from dotenv import load_dotenv
from DroneScanner.utils.datas_processing import pack_elems, agregator, send_data, save_data, remote_save_data
from DroneScanner.core.sig_n_medi_collect import Signal, SignalsArray, get_signal_length
from DroneScanner.core.multichannelswitcher import MultiChannel, get_centre_freq
# Load configuration from the repository-level .env file, if present.
dotenv_path = os.path.join(os.path.dirname(__file__), '../../.env')
if os.path.exists(dotenv_path):
    load_dotenv(dotenv_path)


def _env_flag(name):
    """Interpret an environment variable as a boolean flag.

    The previous code used bool(os.getenv(...)), which is True for ANY
    non-empty string -- including 'False' and '0'.  Unset variables are
    still False and truthy spellings are still True; only the common
    falsy spellings change (they are now correctly False).
    """
    value = os.getenv(name)
    if value is None:
        return False
    return value.strip().lower() not in ('', '0', 'false', 'no', 'off')


# Feature switches (read from .env).
debug_flag = _env_flag('debug_flag')
send_to_module_flag = _env_flag('send_to_module_flag')
save_data_flag = _env_flag('save_data_flag')
# Identity / export settings.
module_name = os.getenv('module_name')
elems_to_save = os.getenv('elems_to_save')
file_types_to_save = os.getenv('file_types_to_save')
localhost = os.getenv('lochost')
localport = os.getenv('locport')
# Frequency plan for the 915 MHz band: space-separated floats in .env.
f_step = [*map(float, os.getenv('f_step_915').split())]
f_bases = [*map(float, os.getenv('f_bases_915').split())]
f_roofs = [*map(float, os.getenv('f_roofs_915').split())]
# Output paths and SMB share credentials.
path_to_save_medians = os.getenv('path_to_save_medians')
path_to_save_alarms = os.getenv('path_to_save_alarms')
smb_host = os.getenv('smb_host')
smb_port = os.getenv('smb_port')  # NOTE(review): read but unused -- connect() below hard-codes port 139; confirm intent.
smb_user = os.getenv('smb_user')
smb_pass = os.getenv('smb_pass')
shared_folder = os.getenv('shared_folder')
the_pc_name = os.getenv('the_pc_name')
remote_pc_name = os.getenv('remote_pc_name')
smb_domain = os.getenv('smb_domain')
elems_to_save = elems_to_save.split(',')
file_types_to_save = file_types_to_save.split(',')
# Scanner state shared by work() below.
tmp_signal = Signal()
tmp_sigs_array = SignalsArray()
multi_channel = MultiChannel(f_step, f_bases, f_roofs)
f = multi_channel.init_f()
multi_channel.fill_DB()
# In debug mode open the SMB connection used later to upload alarm signals.
if debug_flag:
    conn = SMBConnection(smb_user, smb_pass, the_pc_name, remote_pc_name, use_ntlm_v2=True)
    conn.connect(smb_host, 139)
    filelist = conn.listPath(shared_folder, '/')
    print(filelist)
def work(lvl):
    """
    GNU Radio hook for the 915 MHz scanner: process one block of samples
    on the current channel and advance the channel switcher.

    Flow: samples are buffered by tmp_signal until a full window for the
    current centre frequency is collected (fill_sig returns -1 until then);
    the per-channel medians are then compared against the circular-buffer
    thresholds (alarm check), optionally forwarded to the local module
    and/or persisted via save_data, and in debug mode the raw signal of a
    single-channel alarm is uploaded to the SMB share.

    Arguments:
        lvl - presumably a block of IQ samples from the calling flowgraph
              block -- TODO confirm at the GRC block that imports this.
    Returns:
        the (possibly switched) channel value from multi_channel.
    """
    f = multi_channel.get_cur_channel()
    freq = get_centre_freq(f)
    signal_length = get_signal_length(freq)
    # median == -1 means the window is not full yet; do nothing this call.
    median, signal, abs_signal = tmp_signal.fill_sig(lvl, signal_length)
    if median != -1:
        try:
            num_chs, circle_buffer = multi_channel.check_f(f)
            #print(f, freq, num_chs)
            #print(median)
            cur_channel, sigs_array = tmp_sigs_array.fill_sig_arr(median, num_chs)
            if sigs_array:
                print('Значения на {0}: {1}'.format(freq, sigs_array))
                print('Пороги: ', circle_buffer.get_medians())
                # Alarm branch freezes the thresholds; otherwise they keep adapting.
                alarm = circle_buffer.check_alarm(sigs_array)
                if alarm:
                    print('----ALARM---- ', freq)
                    multi_channel.db_alarms_zeros(circle_buffer)
                else:
                    circle_buffer.update(sigs_array)
                if send_to_module_flag:
                    send_data(agregator(freq, alarm), localhost, localport)
                if save_data_flag:
                    # First write after init emits a header row, then data rows.
                    if not circle_buffer.check_init() and circle_buffer.current_column - 1 == 0:
                        save_data(path_to_save_medians, freq, 'DateTime', 'ALARM', 'max signal', list(range(num_chs)),
                                  list(range(num_chs)))
                    if circle_buffer.check_init():
                        save_data(path_to_save_medians, freq, datetime.datetime.now(), alarm, max(sigs_array), sigs_array,
                                  circle_buffer.get_medians())
                # print(circle_buffer.get_buffer())
                # print(circle_buffer.get_medians())
                # print(circle_buffer.get_alarms())
                if debug_flag:
                    # Debug path: upload the raw signal of a single-channel alarm over SMB.
                    single_alarm = circle_buffer.check_single_alarm(median, cur_channel)
                    print(cur_channel, single_alarm)
                    if single_alarm:
                        data = pack_elems(elems_to_save, file_types_to_save, signal, abs_signal)
                        print('SAVE CURRENT SIGNAL SROCHNO TI MENYA SLISHISH?!?!?!?')
                        try:
                            remote_save_data(conn, data, module_name, freq, shared_folder, path_to_save_alarms)
                        except Exception as e:
                            print(f"Ошибка: {e}")
                        else:
                            print('VSE OKI DOKI SIGNAL SOKHRANYAT NE NUZHNO!!!')
            f = multi_channel.change_channel()
        except Exception as e:
            # NOTE(review): broad catch keeps the flowgraph alive but hides bugs.
            print(str(e))
            print(".", end='')
    return f

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save