New files added in this commit:
.gitignore (vendored, Normal file, 8 lines added)
@@ -0,0 +1,8 @@
/.idea
/venv
.pytest_cache/
dist/
build/
__pycache__/
*.pyc
*.egg-info/
.gitlab-ci.yml (Normal file, 119 lines added)
@@ -0,0 +1,119 @@
---
image: $BUILDER_IMAGE

variables:
  project: "xmlrpcserver"

stages:
  - init
#  - lint
#  - test
  - build
  - pack
  - upload
  - deploy
  - deploy_qa

include:
  - project: "ipd/infra"
    file: ".version.yml"
  - project: "ipd/infra"
    file: ".python_lint.yml"
  - project: "ipd/infra"
    file: ".python_jobs.yml"
  - project: "ipd/infra"
    file: ".ansistrano-cd.yml"

#lint:
#  variables:
#    project: "journal"
#  extends: .python_lint
#  allow_failure: false
#  only:
#    refs:
#      - merge_requests
#    changes:
#      - "**/*.py"

#pylint:
#  variables:
#    project: "journal"
#  extends: .pylint
#  allow_failure: false
#  only:
#    refs:
#      - merge_requests
#    changes:
#      - "**/*.py"

#pytest:
#  stage: test
#  tags: [docker]
#  image: $REGISTRY_ADDRESS/ipd/$CI_PROJECT_NAME:builder
#  services:
#    - name: $DOCKER_postgis
#      alias: postgres
#    - name: $DOCKER_queue
#      alias: rabbitmq
#  variables:
#    ENV_FOR_DYNACONF: testing
#    POSTGRES_PASSWORD: postgres
#    POSTGRES_HOST: postgres
#    POSTGRES__PGBOUNCER: ""
#    AMQP__LOGIN: user
#    AMQP__PASSWORD: user
#  before_script:
#    - apt install python -y
#    - pip3 install -i $PYPI_REGISTRY --force-reinstall "poetry==1.4.2"
#    - export PATH=$(pwd)/.venv/bin:$PATH
#    - rm poetry.lock || true
#    - poetry config virtualenvs.in-project true
#    - poetry config repositories.nexus $PYPI_REGISTRY
#    - poetry source add nexus https://nexus.sitronics-kt.dev/repository/pypi/simple/ --default
#    - poetry install
#  script:
#    - poetry env info
#    - poetry run coverage run --concurrency=thread --source=. -m pytest tests --junitxml=report.xml
#    - poetry run coverage report
#    - poetry run genbadge tests -i - < report.xml -o - > badge.svg
#  allow_failure: false
#  coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
#  only:
#    refs:
#      - devel
#      - merge_requests
#  artifacts:
#    when: always
#    paths:
#      - badge.svg
#    reports:
#      junit: report.xml

docker_builder_build:
  extends: .builder_docker_image_dnd

make_dist:
  extends: .dist_make_new

docker_build:
  extends: .docker_build

get_version:
  stage: upload
  tags: [docker]
  script:
    - buildVersion=$(cat ./VERSION)
    - echo "serviceVersion=$buildVersion" >> buildVersion.env
    - echo $buildVersion
  only:
    refs:
      - merge_requests
  artifacts:
    reports:
      dotenv: buildVersion.env

#deploy_qa_autotest:
#  extends: .deploy_compose_qa
#  variables:
#    GEODATA_TAG: test
#    serviceVersion: $serviceVersion
poetry.lock (generated, Normal file, 1301 lines added; file diff suppressed because it is too large)
pyproject.toml (Normal file, 53 lines added)
@@ -0,0 +1,53 @@
[tool.poetry]
name = "xmlrpcserver"
version = "0.1.2"
description = ""
authors = ["Sitronics <info@sitronics-kt.ru>"]
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.8"
aiohttp = "3.8.6"
aiosignal = "1.3.1"
annotated-types = "0.5.0"
anyio = "3.7.1"
async-timeout = "4.0.3"
attrs = "23.1.0"
backoff = "2.2.1"
boto3 = "1.26.76"
botocore = "1.29.76"
certifi = "2023.7.22"
charset-normalizer = "3.3.2"
click = "8.1.7"
fastapi = "0.92.0"
frozenlist = "1.4.0"
gql = "3.4.1"
graphql-core = "3.2.1"
greenlet = "2.0.0"
h11 = "0.14.0"
idna = "3.4"
jmespath = "1.0.1"
multidict = "6.0.4"
pika = "1.3.2"
pika-stubs = "0.1.3"
psycopg = "3.1.10"
pydantic = "1.10.13"
pydantic_core = "2.6.3"
pygost = { path = "./xmlrpcserver/deps/pygost-5.12", develop = true }
python-dateutil = "2.8.2"
python-multipart = "0.0.6"
requests = "2.31.0"
s3transfer = "0.6.2"
six = "1.16.0"
sniffio = "1.3.0"
SQLAlchemy = "2.0.20"
typing_extensions = "4.8.0"
urllib3 = "1.26.18"
uvicorn = "0.20.0"
yarl = "1.9.2"
deb_structurer = "*"


[build-system]
requires = ["setuptools", "poetry_core>=1.0"]
build-backend = "poetry.core.masonry.api"
request_itv/__init__.py (Normal file, empty)
request_itv/__main__.py (Normal file, 279 lines added)
@@ -0,0 +1,279 @@
from tempfile import TemporaryDirectory
from typing import Optional, Any

import pika
import threading
import time
from queue import Queue
import datetime
import json
from uuid import uuid4, UUID
from xmlrpc.server import SimpleXMLRPCServer
from xmlrpc.client import ServerProxy
import logging
import io
import zlib
import os.path
import requests
from .reqs_graphql import get_catalog, get_object
import xml.etree.ElementTree as ET
from .reqs.request_xml_service import RequestXmlService
import zipfile
from .config import Config
from .zip import Zip
import boto3
import request_itv.db as db
from sqlalchemy.orm import Session
from fastapi import FastAPI, Response, Form, UploadFile, File, Request
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
from typing_extensions import Annotated
import pathlib
from shutil import make_archive


tasks = Queue()

connection: Optional[pika.BlockingConnection] = None
channel: Optional[Any] = None
server: Optional[SimpleXMLRPCServer] = None

logger = logging.getLogger('xmlrpcserver')


def s3_connection():
    return boto3.client('s3', endpoint_url=Config.s3_endpoint,
                        aws_access_key_id=Config.s3_key_id,
                        aws_secret_access_key=Config.s3_access_key)


def download_file(key: str, bucket: str, filename: str):
    client = s3_connection()
    obj = client.get_object(Bucket=bucket, Key=key)
    with open(filename, 'wb') as f:
        for chunk in obj['Body'].iter_chunks(chunk_size=4096):
            f.write(chunk)


def upload_file(filename: str, key: str, bucket: str):
    client = s3_connection()
    with open(filename, 'rb') as f:
        client.put_object(Body=f.read(), Bucket=bucket, Key=key)


def get_branch(bndname: str, scheme: str):
    conn = db.connect_db()
    with Session(conn) as session:
        item = session.query(db.IncomeBranch).filter_by(scheme=scheme).join(
            db.User).filter_by(bndname=bndname).one_or_none()
        if item:
            return item.branch, item.local_scheme
        return None, None


def restore_uuid(oid):
    uuid = UUID(oid)
    return str(uuid)


def load_catalog(params, files, url):
    logger.warning('load_catalog')
    date = datetime.datetime.now()
    rxmls = RequestXmlService()
    req = ET.fromstring(params['query_data'])
    req_id = rxmls.get_request_uuid(req)
    res_id = uuid4().hex
    res = rxmls.get_request_document(res_id, req_id)
    rxmls.set_result(res, 0, '')
    response_params = {
        'from': params['to'],
        'to': params['from'],
        'ts_added': date.timestamp(),
        'user_id': '1',
        'user_id_to': params['user_id'],
        'query_type': 1004,
        'query_data': ET.tostring(res, encoding='unicode', xml_declaration=True)
    }
    catalog = get_catalog()
    logging.debug('Catalog_loaded')
    filename = uuid4().hex
    filepath = '/tmp/' + filename
    zipf = zipfile.ZipFile(filepath, "w")
    zipf.writestr('WF.CLL', catalog)
    zipf.close()
    response_files = [{'name': filename, 'url': filepath, 'size': os.path.getsize(filepath)}]
    proxy = ServerProxy(url)
    proxy.send(response_params, response_files, Config.ret_path)


def get_objects(params, files, url):
    date = datetime.datetime.now()
    rxmls = RequestXmlService()
    req = ET.fromstring(params['query_data'])
    req_id = rxmls.get_request_uuid(req)
    res_id = uuid4().hex
    res = rxmls.get_request_document(res_id, req_id)
    rxmls.set_result(res, 0, '')
    objs = req.find('objects')
    uids = [restore_uuid(x.get('object_id')) for x in objs.findall('object')]

    rxmls.set_result(res, 0, '')
    response_params = {
        'from': params['to'],
        'to': params['from'],
        'ts_added': date.timestamp(),
        'user_id': '1',
        'user_id_to': params['user_id'],
        'query_type': 1001,
        'query_data': ET.tostring(res, encoding='unicode', xml_declaration=True)
    }
    filename = uuid4().hex
    filepath = '/tmp/' + filename
    zipf = zipfile.ZipFile(filepath, "w")
    main_filename = None
    for uid in uids:
        obj = json.loads(get_object(uid))
        for file in obj['properties'].get('c1000', []):
            if not main_filename:
                main_filename = file['fileName']
            # HTTP response for the file download (distinct name so it does not
            # shadow the XML response document `res` built above)
            resp = requests.get(Config.gql_download, params={'item_id': file["key"]})
            zipf.writestr(f'{main_filename}/{file["fileName"]}', resp.content)
    zipf.close()
    response_files = [{'name': filename, 'url': filepath, 'size': os.path.getsize(filepath)}]
    proxy = ServerProxy(url)
    proxy.send(response_params, response_files, Config.ret_path)


def get_metadata(params, files, url):
    date = datetime.datetime.now()
    rxmls = RequestXmlService()
    req = ET.fromstring(params['query_data'])
    req_id = rxmls.get_request_uuid(req)
    res_id = uuid4().hex
    res = rxmls.get_request_document(res_id, req_id)
    rxmls.set_result(res, 0, '')
    objs = req.find('getMetadataByIds')
    uids = [restore_uuid(x.get('id')) for x in objs.findall('chart')]

    rxmls.set_result(res, 0, '')
    response_params = {
        'from': params['to'],
        'to': params['from'],
        'ts_added': date.timestamp(),
        'user_id': '1',
        'user_id_to': params['user_id'],
        'query_type': 1024,
        'query_data': ET.tostring(res, encoding='unicode', xml_declaration=True)
    }
    filename = uuid4().hex
    filepath = '/tmp/' + filename
    zipf = zipfile.ZipFile(filepath, "w")
    content = ET.Element('getMetadataResponse')

    for uid in uids:
        obj = json.loads(get_object(uid))
        date = datetime.datetime.fromisoformat(obj['date_updated'])
        chart = ET.SubElement(content, 'chart', {
            'id': UUID(obj['uid']).hex,
            'updated': str(date.timestamp()),
        })
        for key in obj['properties']:
            if not key.startswith('c'):
                continue
            mdel = ET.SubElement(chart, 'mdItem', {
                'code': key.replace('_', '.'),
                'name': key,
                'value': str(obj['properties'].get(key, '')),
                'isBase': 'false',
                'groupId': '',
                'groupName': '',
            })
    zipf.writestr('metadata.xml', ET.tostring(content, encoding='unicode', xml_declaration=True))
    zipf.close()
    response_files = [{'name': filename, 'url': filepath, 'size': os.path.getsize(filepath)}]
    proxy = ServerProxy(url)
    proxy.send(response_params, response_files, Config.ret_path)


def put_object(params, files, url):
    # OOConnectionParams, OODBWorkspace, Polygon, Envelope, SRFactory, FileValue
    # and the variant* helpers come from an OODB SDK whose import does not
    # appear in this diff.
    date = datetime.datetime.now()
    req = ET.fromstring(params['query_data'])
    obj = req.find('chart')
    class_id = obj.get('Class')
    con = OOConnectionParams(Config.oodb_schema, Config.oodb_host, Config.oodb_port, Config.oodb_dbname,
                             Config.oodb_username, Config.oodb_passwd, Config.oodb_schema)
    ws = OODBWorkspace.ws(Config.oodb_schema)
    if not ws.isInit():
        res = ws.init(con)
        logger.warning(res)
    logging.info(class_id)
    fc = ws.featureClass(class_id)
    feature = fc.createFeature('')
    geom = Polygon.fromExtent(Envelope(0.0, 0.0, 1.0, 1.0, SRFactory.PZ9011()))
    res = feature.setGeometry(geom)
    for attr in obj.findall('Attribute'):
        name = attr.get('name')
        value = attr.get('value')
        res &= feature.addAttribute(name, variantFromString(value))

    assert len(files) == 1
    file = files[0]
    tmpdir = TemporaryDirectory()  # named tmpdir to avoid shadowing the dir() builtin
    with zipfile.ZipFile(file, 'r') as zip_ref:
        zip_ref.extractall(tmpdir.name)

    fp = pathlib.Path(tmpdir.name)
    for item in fp.glob('**/*'):
        if not item.is_file():
            continue
        fileVal = FileValue()
        key = uuid4().hex
        fileVal.fileName = variantToString(item.relative_to(tmpdir.name))
        fileVal.key = variantToString(key)
        fileVal.bucket = variantToString(Config.s3_bucket)
        res &= feature.addAttribute('c1000', variantFromFileValue(fileVal))
        upload_file(str(item), key, Config.s3_bucket)

    ws.transaction()
    res = ws.save()
    ws.commit(f'Putobject from {params["to"]}')
    ws.clearData(True)


def run_task(query_type, params, files, url):
    if query_type == 4:
        tasks.put(lambda: load_catalog(params, files, url))
    if query_type == 1:
        tasks.put(lambda: get_objects(params, files, url))
    if query_type == 24:
        tasks.put(lambda: get_metadata(params, files, url))
    if query_type == 6:
        tasks.put(lambda: put_object(params, files, url))


def main():
    global connection
    global server
    global channel

    logger.setLevel(logging.INFO)
    logger.warning('Use Control-C to exit')

    # run_tasks (the queue worker) and app (the FastAPI application) are
    # referenced here but their definitions do not appear in this diff view.
    thread = threading.Thread(target=run_tasks)
    thread.start()

    try:
        logger.warning('Start server')
        uvicorn.run(app, host="0.0.0.0", port=8000)
    except KeyboardInterrupt:
        logger.warning('Exiting')
    finally:
        # server is never assigned in this module; guard against closing None
        if server is not None:
            server.server_close()


def vers_key(e):
    return e.version()


if __name__ == '__main__':
    main()
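Note: main() starts a thread running run_tasks, yet no run_tasks appears in the diff above. A minimal worker loop consistent with how run_task() enqueues zero-argument callables into the module-level tasks queue might look like this; it is a sketch of the assumed missing helper, not the committed implementation:

def run_tasks():
    # Drain the module-level tasks queue forever; each entry is a
    # zero-argument callable enqueued by run_task().
    while True:
        job = tasks.get()
        try:
            job()
        except Exception:
            logger.exception('task failed')
        finally:
            tasks.task_done()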
request_itv/config.py (Normal file, 39 lines added)
@@ -0,0 +1,39 @@
class Config:
    ret_path: str = 'http://10.10.8.83:32200/'
    self_bnd: str = 'bnd127'
    enserver: str = 'http://10.10.8.83:32210/xmlrpc'
    remote_bnd: str = 'bnd128'

    pg_host: str = '10.10.8.83'
    pg_port: int = 32101
    pg_dbname: str = 'db'
    pg_username: str = 'postgres'
    pg_password: str = 'Root12345678'

    oodb_host: str = '10.10.8.83'
    oodb_port: int = 32100
    oodb_dbname: str = 'db'
    oodb_username: str = 'postgres'
    oodb_passwd: str = 'Root12345678'
    oodb_schema: str = 'documents_src'

    rabbit_conn: str = 'amqp://user:password@10.10.8.83:31005/%2f'
    rabbit_queue: str = 'ipd'

    ws_rabbit_params: dict = {
        'host': '10.10.8.83',
        'port': 31005,
        'exchange': 'ipd',
        'user': 'user',
        'password': 'password',
    }

    s3_endpoint: str = 'http://10.10.8.83:31006'
    s3_key_id: str = 's57'
    s3_access_key: str = 'd9MMinLF3U8TLSj'
    s3_bucket: str = 'files'

    gql_url: str = 'https://gql.ivazh.ru/graphql'
    gql_download: str = 'https://gql.ivazh.ru/item/{key}'
    gql_schema: str = 'pdim'
request_itv/config/response2.json (Normal file, 12 lines added)
@@ -0,0 +1,12 @@
{
  "filestorage": {
    "type": "s3",
    "endpoint": "http://10.10.8.83:31006",
    "key_id": "s57",
    "access_key": "d9MMinLF3U8TLSj",
    "download_path": "/tmp"
  },
  "file_code": "c1000",
  "name_code": "c122",
  "use_version": true
}
request_itv/config/workspaces.json (Normal file, 167 lines added)
@@ -0,0 +1,167 @@
{
  "databases": {
    "oodb_git": {
      "host": "10.10.8.83",
      "port": 32100,
      "database": "db",
      "user": "postgres",
      "password": "Root12345678"
    }
  },
  "workspaces": {
    "documents_src": {
      "type": "documents",
      "group": "src",
      "database": "oodb_git",
      "schema": "documents_src",
      "alias": "Документы исходная"
    },
    "documents_standard": {
      "type": "documents",
      "group": "order",
      "database": "oodb_git",
      "schema": "documents_standard",
      "alias": "Документы эталон"
    },
    "documents_standard_pub": {
      "type": "documents",
      "group": "order",
      "database": "oodb_git",
      "schema": "documents_standard",
      "alias": "Документы публичная"
    },
    "ood": {
      "type": "npd",
      "group": "src",
      "database": "oodb_git",
      "schema": "ood",
      "alias": "ООБД исходные НПД",
      "map_service": "VUE_APP_GISAIS_URL:/styles/ood/style.json"
    },
    "oodb": {
      "type": "oodb",
      "group": "order",
      "database": "oodb_git",
      "schema": "kartap",
      "alias": "ООДБ эталон",
      "map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
    },
    "oodb_standard": {
      "type": "oodb",
      "group": "forming_standard",
      "database": "oodb_git",
      "schema": "kartap",
      "alias": "ООДБ эталон",
      "map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
    },
    "oodb_working": {
      "type": "oodb",
      "group": "forming_work",
      "database": "oodb_git",
      "schema": "kartap",
      "alias": "ООДБ рабочая",
      "map_service": "VUE_APP_GISAIS_URL:/styles/oodb_tech/style.json"
    },
    "oodb_pub": {
      "type": "oodb",
      "group": "order",
      "database": "oodb_git",
      "schema": "kartap",
      "alias": "ООБД публичная",
      "map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
    },
    "regions": {
      "type": "regions",
      "database": "oodb_git",
      "schema": "regions_hard",
      "alias": "Регионы",
      "map_service": "VUE_APP_GISAIS_URL_GK:/styles/regions/style.json"
    },
    "regions_contour": {
      "type": "regions",
      "database": "oodb_git",
      "schema": "regions_hard",
      "alias": "Регионы",
      "map_service": "VUE_APP_GISAIS_URL_GK:/styles/regions_contour/style.json"
    },
    "npd_9": {
      "type": "npd",
      "database": "oodb_git",
      "schema": "npd_9",
      "alias": "НПД 9.0"
    },
    "npd": {
      "type": "npd",
      "database": "oodb_git",
      "schema": "initial",
      "alias": "НПД 9.0"
    },
    "npd_831": {
      "type": "npd",
      "group": "order",
      "database": "oodb_git",
      "schema": "npd_831",
      "alias": "НПД 8.31"
    },
    "updater_test": {
      "type": "npd",
      "group": "order",
      "database": "oodb_git",
      "schema": "npd_831_test",
      "alias": "НПД 8.31 публичная"
    },
    "lukoil": {
      "type": "oodb",
      "database": "oodb_git",
      "schema": "lukoil",
      "alias": "ЛУКОЙЛ",
      "map_service": "VUE_APP_GISAIS_URL_GK:/styles/lukoil/style.json"
    },
    "geocover": {
      "type": "ecpz",
      "group": "order",
      "database": "oodb_git",
      "schema": "coverage",
      "alias": "ЕЦПЗ"
    },
    "geocover_test": {
      "type": "ecpz",
      "database": "oodb_git",
      "schema": "coverage",
      "alias": "ЕЦПЗ тест"
    },
    "gcmr": {
      "type": "gcmr",
      "group": "order",
      "database": "oodb_git",
      "schema": "gcmr",
      "alias": "ГЦМР"
    },
    "orders": {
      "type": "system",
      "database": "oodb_git",
      "schema": "orders",
      "alias": "Заказы"
    },
    "ilo": {
      "type": "system",
      "database": "oodb_git",
      "schema": "ilo",
      "alias": "ИЛО"
    },
    "raz_sgok": {
      "type": "raz_sgok",
      "database": "razsgok",
      "schema": "razsgok",
      "alias": "СГОК",
      "map_service": "VUE_APP_GISAIS_URL_GK:/styles/raz_sgok/style.json"
    },
    "raz_vtu": {
      "type": "raz_vtu",
      "database": "razvtu",
      "schema": "razvtu",
      "alias": "ВТУ",
      "map_service": "VUE_APP_GISAIS_URL_GK:/styles/raz_vtu/style.json"
    }
  }
}
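Each workspace entry references a database by key, so resolving connection settings is a two-step lookup. A small sketch of that resolution (the helper name and file path are illustrative, not part of the commit):

import json

def workspace_db(config_path: str, workspace: str) -> dict:
    # Map a workspace name to the connection settings of its database,
    # carrying the workspace's schema along.
    with open(config_path, encoding='utf-8') as f:
        cfg = json.load(f)
    ws = cfg['workspaces'][workspace]
    settings = dict(cfg['databases'][ws['database']])
    settings['schema'] = ws['schema']
    return settings

# e.g. workspace_db('request_itv/config/workspaces.json', 'documents_src')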
request_itv/db.py (Normal file, 166 lines added)
@@ -0,0 +1,166 @@
from datetime import datetime
from typing import List, Optional

from sqlalchemy import create_engine, String, select, ForeignKey, Enum
from sqlalchemy.orm import Session, DeclarativeBase, Mapped, mapped_column, relationship

from .config import Config  # package-relative: this module is imported as request_itv.db


def tow(day: int, hour: int, minute: int):
    # "Time of week" in minutes since Monday 00:00.
    return minute + hour * 60 + day * 60 * 24


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = 'users'

    id: Mapped[int] = mapped_column(primary_key=True)
    username: Mapped[str]
    passwd: Mapped[str]
    bndname: Mapped[str]
    newbnd: Mapped[bool]
    active: Mapped[bool]
    upstream: Mapped[bool]

    profiles: Mapped[List['Profile']] = relationship(
        back_populates='user', cascade='all, delete-orphan'
    )

    income_branches: Mapped[List['IncomeBranch']] = relationship(
        back_populates='user', cascade='all, delete-orphan'
    )

    schedule: Mapped[List['Schedule']] = relationship(
        back_populates='user', cascade='all, delete-orphan'
    )

    queue: Mapped[List['Queue']] = relationship(
        back_populates='user', cascade='all, delete-orphan'
    )

    def __repr__(self) -> str:
        return f'User(id={self.id!r}, username={self.username!r}, password={self.passwd!r}, newbnd={self.newbnd})'

    def to_dict(self) -> dict:
        return {
            'id': self.id,
            'username': self.username,
            'bndname': self.bndname,
            'newbnd': self.newbnd,
            'active': self.active,
            'upstream': self.upstream,
            'profiles': [x.to_dict() for x in self.profiles],
            'schedule': [x.to_dict() for x in self.schedule],
            'queue': [x.to_dict() for x in self.queue],
            'income_branches': [x.to_dict() for x in self.income_branches],
        }

    def is_active_now(self):
        if not len(self.schedule):
            return True
        dt = datetime.now()
        curr_tow = tow(dt.weekday(), dt.hour, dt.minute)
        for x in self.schedule:
            if (tow(x.day_start, x.hour_start, x.minute_start) <= curr_tow
                    <= tow(x.day_end, x.hour_end, x.minute_end)):
                return True
        return False


class Profile(Base):
    __tablename__ = 'profiles'

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    scheme: Mapped[str]
    branch: Mapped[str] = mapped_column(String, nullable=True)
    json: Mapped[str] = mapped_column(String, nullable=True)
    no_files: Mapped[bool]

    user: Mapped['User'] = relationship(back_populates='profiles')

    def to_dict(self) -> dict:
        return {
            'id': self.id,
            'scheme': self.scheme,
            'branch': self.branch,
            'json': self.json,
        }


class IncomeBranch(Base):
    __tablename__ = 'income_branches'

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    scheme: Mapped[str]
    branch: Mapped[str]
    local_scheme: Mapped[str]

    user: Mapped['User'] = relationship(back_populates='income_branches')

    def to_dict(self) -> dict:
        return {
            'id': self.id,
            'scheme': self.scheme,
            'branch': self.branch,
            'local_scheme': self.local_scheme,
        }


class Schedule(Base):
    __tablename__ = 'schedule'

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    day_start: Mapped[int]
    hour_start: Mapped[int]
    minute_start: Mapped[int]
    day_end: Mapped[int]
    hour_end: Mapped[int]
    minute_end: Mapped[int]

    user: Mapped['User'] = relationship(back_populates='schedule')

    def to_dict(self) -> dict:
        return {
            'id': self.id,
            'day_start': self.day_start,
            'hour_start': self.hour_start,
            'minute_start': self.minute_start,
            'day_end': self.day_end,
            'hour_end': self.hour_end,
            'minute_end': self.minute_end,
        }


class Queue(Base):
    __tablename__ = 'queue'

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    commit_id: Mapped[str]
    schema: Mapped[str]

    user: Mapped['User'] = relationship(back_populates='queue')

    def to_dict(self) -> dict:
        return {
            'id': self.id,
            'commit_id': self.commit_id,
            'schema': self.schema,
        }


class Schemas(Base):
    __tablename__ = 'schemas'

    id: Mapped[int] = mapped_column(primary_key=True)
    schema: Mapped[str]
    schema_type: Mapped[str]


def connect_db():
    return create_engine(f"postgresql+psycopg://{Config.pg_username}:{Config.pg_password}@{Config.pg_host}:{Config.pg_port}/{Config.pg_dbname}")
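The engine from connect_db() is meant to be wrapped in a Session, the way __main__.get_branch() does it. A minimal query sketch (the username value is illustrative):

from sqlalchemy.orm import Session
import request_itv.db as db

engine = db.connect_db()
with Session(engine) as session:
    # Fetch one user; to_dict() walks the declared relationships
    # (profiles, schedule, queue, income_branches) via lazy loading.
    user = session.query(db.User).filter_by(username='demo').one_or_none()
    if user is not None:
        print(user.to_dict())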
request_itv/reqs/__init__.py (Normal file, empty)
request_itv/reqs/classifier.py (Normal file, 7 lines added)
@@ -0,0 +1,7 @@
import graphql

CLASSIFIER_VERSION = '8.23'


class Classifier:
    def get_classifier(self):
        # The original commit left this method without a body, which is a
        # syntax error; a placeholder is required for the module to import.
        raise NotImplementedError
request_itv/reqs/correcting_replication_service.py (Normal file, 71 lines added)
@@ -0,0 +1,71 @@
from datetime import datetime, timedelta
from typing import List
import xml.etree.ElementTree as ET

# Package-relative imports: these modules live alongside this file in
# request_itv/reqs/, so the original bare "from enumerations import ..."
# and "from models import ..." would fail at package import time.
from request_itv.reqs.enumerations import ReplicationPackageStatusEnum
from request_itv.reqs.models import CorrectingReplicationOutPackage, ReplicantInfo
from request_itv.reqs.request_xml_service import RequestXmlService
from request_itv.reqs.classifier import CLASSIFIER_VERSION

# `xml_service` is used below but was never defined in the original file;
# a module-level instance is assumed here.
xml_service = RequestXmlService()


class CorrectingReplicationService:
    EVENT_TYPE: str = 'correcting_replication'
    object_list_size: int = 0
    deleted_objects_count: int = 0
    PACKAGE_TAG_NAME: str = 'package'
    PACKAGE_ID_ATTRIBUTE_NAME: str = 'pkg_id'
    SERVER_VERSION_ATTRIBUTE_NAME: str = 'server_version'
    SERVER_ID_ATTRIBUTE_NAME: str = 'server_id'
    OBJECTS_TAG_NAME: str = 'objects'
    OBJECT_TAG_NAME: str = 'object'
    OBJECT_ID_ATTRIBUTE_NAME: str = 'id'
    IS_RSC_OBJECT: str = 'is_rsc'
    OBJECT_SOURCE_ATTRIBUTE_NAME: str = 'source'
    OBJECT_VERSION_ATTRIBUTE_NAME: str = 'version'
    METADATA_VERSION_ATTRIBUTE_NAME: str = 'md_version'
    ACTUAL_VERSION_DATE_ATTRIBUTE_NAME: str = 'version_date'
    OBJECT_MISSING_ATTRIBUTE_NAME: str = 'missing'
    CLASSIFIER_VERSION_ATTRIBUTE_NAME: str = 'metadata_clientVersion'
    RESULT_TAG_NAME: str = 'result'
    RESULT_MESSAGE_ATTRIBUTE_NAME: str = 'result_message'
    VERSION_ID_ATTRIBUTE_NAME: str = 'version_id'
    VERSION_SOURCE_ATTRIBUTE_NAME: str = 'version_source'

    # Python has no method overloading: the original file defined
    # start_correcting_replication three times, so only the last definition
    # survived. The first two variants are given distinct names here.
    def start_correcting_replication_by_id(self, replicant_id: str, replication_period: int,
                                           replication_timeout: int, manual_run: bool):
        replicant = ReplicantInfo()
        self.start_correcting_replication(replicant, replication_period, replication_timeout, manual_run)

    def start_correcting_replication_except(self, except_replicant_id: List[str], replication_period: int,
                                            replication_timeout: int, manual_run: bool):
        pass

    def start_correcting_replication(self, replicant: ReplicantInfo, replication_period: int,
                                     replication_timeout: int, manual_run: bool):
        pass

    def create_replication_package(self, replicant: ReplicantInfo, replication_period: int,
                                   replication_timeout: int) -> CorrectingReplicationOutPackage:
        pkg = CorrectingReplicationOutPackage()
        pkg.filter_string = replicant.filter_string
        pkg.replicant_id = replicant.replicant_id
        pkg.server_address = replicant.server_address
        pkg.replication_period = replication_period
        pkg.replication_timeout = replication_timeout
        pkg.status = ReplicationPackageStatusEnum.SENT_REQUEST_TO_CHILD
        date = datetime.now()
        pkg.replication_date = date
        if replication_period:
            delta = timedelta(days=-replication_period)
            pkg.from_date = date + delta
        if replication_timeout:
            delta = timedelta(seconds=replication_timeout)
            pkg.replication_timeout_date = date + delta
        return pkg

    def get_package_xml(self, pkg: CorrectingReplicationOutPackage, request_uuid: str) -> str:
        request_xml: ET.Element = xml_service.get_common_request_xml(request_uuid)
        element = ET.SubElement(request_xml, self.PACKAGE_TAG_NAME, {
            self.PACKAGE_ID_ATTRIBUTE_NAME: str(pkg.package_id),  # XML attributes must be strings
            self.SERVER_VERSION_ATTRIBUTE_NAME: '1.0.0',
            self.SERVER_ID_ATTRIBUTE_NAME: 'ipd-server',
            # The original line was truncated ("classifier.,"); the classifier
            # module's CLASSIFIER_VERSION constant is the likely intent.
            self.CLASSIFIER_VERSION_ATTRIBUTE_NAME: CLASSIFIER_VERSION,
        })
        xml_service.set_request_uuid(request_xml, request_uuid)
        return ET.tostring(request_xml, encoding='unicode', xml_declaration=True)
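Putting the two halves together: create_replication_package() fills a CorrectingReplicationOutPackage from a ReplicantInfo, and get_package_xml() serializes it into the common request envelope. A sketch of that flow (field values and the uuid are illustrative):

from uuid import uuid4

svc = CorrectingReplicationService()
replicant = ReplicantInfo()
replicant.replicant_id = 'child-1'
replicant.server_address = 'http://child.example/xmlrpc'
replicant.filter_string = ''
pkg = svc.create_replication_package(replicant, replication_period=7, replication_timeout=3600)
pkg.package_id = 1
print(svc.get_package_xml(pkg, uuid4().hex))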
request_itv/reqs/enumerations.py (Normal file, 10 lines added)
@@ -0,0 +1,10 @@
from enum import Enum


class ReplicationPackageStatusEnum(Enum):
    CANCELLED = -4
    TIMEDOUT = -3
    ERROR = -2
    SENT_REQUEST_TO_CHILD = 1
    SENT_PACKAGE_TO_CHILD = 2
    COMPLETED = 3
request_itv/reqs/messages.py (Normal file, 233 lines added)
@@ -0,0 +1,233 @@
from enum import Enum


class Messages(Enum):
    REQUEST_EXECUTED_PARTITIALY = 1
    REQUEST_FAILED = 2
    OBJECT_FORBIDDEN = 3
    INVALID_REQUEST_USER = 4
    ACTION_FORBIDDEN = 5
    OBJECTS_FORBIDDEN = 6
    INVALID_REPLICATION_PARAMETERS = 7
    OBJECT_LIST = 11
    OBJECT_LIST_ITEM = 12
    MISSING_TAG = 13
    NO_OBJECTS_AVAILABLE = 14
    OBJECT_BASE_CELL_NOT_FOUND = 15
    WRONG_VERSION_INTERVAL = 16
    REQUEST_PROCESS_STARTED = 17
    REQUEST_PROCESS_FINISHED = 18
    USER_IS_NOT_VALID = 19
    RESPONSE_DELIVERY_ERROR = 20
    RESPONSE_DELIVERY_SUCCESS = 21
    RESPONSE_SEND = 22
    RESPONSE_SEND_ERROR = 23
    DUPLICATE_CHART_NAME = 24
    EXTENDED_METADATA_FORBIDDEN = 25
    MAX_RESULT_COUNT_EXCEEDED = 26
    MAX_RESULT_SIZE_EXCEEDED = 27
    CLIENT_DISCONNECTED = 28
    NO_REQUEST_DATA_PROVIDED = 29
    NO_USER_ID_PROVIDER = 30
    INVALID_REQUEST_DATA = 31
    UNKNOWN_FILTER_ATTRIBUTE_CODE = 32
    UNKNOWN_FILTER_OPERATION = 33
    ARCHIVE_ACCESS_FORBIDDEN = 34
    BROKEN_S57_FILE = 35
    UNKNOWN_OBJECT_ID = 36
    NO_FILTER_IN_REPORT = 37
    UPLOAD_OBJECT_LIST = 100
    UPLOAD_OBJECT_LIST_ITEM = 101
    UPLOAD_WRONG_STRUCTURE = 102
    FOLDER_NOT_FOUND = 103
    UPLOAD_WRONG_SUBVERSION = 104
    UPLOAD_MISSING_CHARTS_FILE = 105
    UPLOAD_NO_CHARTS_IN_FILE = 106
    UPLOAD_WRONG_CHART_TYPE = 107
    UPLOAD_OBJECT_EXISTS = 108
    UPLOAD_BASE_CELL_NOT_FOUND = 109
    UPLOAD_WRONG_VERSION_SEQUENCE = 110
    WRONG_DELETE_VERSION_NUMBER = 111
    CANT_CHANGE_STAMP = 112
    OBJECT_ALREADY_UPDATED = 113
    OBJECT_LOCKED = 114
    UPLOAD_MAIN_FILE_NOT_FOUND = 115
    UPLOAD_VERSION_NUMBER_NOT_THE_SAME = 116
    FROM_ADDRESS = 117
    TO_ADDRESS = 118
    UPLOAD_OBJECT_EXISTS_BY_SIZE_AND_CHECKSUM = 119
    CORRECTING_REPLICATION_TIMED_OUT = 120
    CORRECTING_REPLICATION_AUTO_STARTED = 121
    CORRECTING_REPLICATION_MANUAL_STARTED = 122
    CORRECTING_REPLICATION_ALREADY_STARTED = 123
    CORRECTING_REPLICATION_CANCELLED = 124
    REPLICATION_SENT = 125
    REPLICATION_RECEIVED = 126
    WRONG_OBJECT_FILE_CHECKSUM = 127
    WRONG_UPLOADING_FILE_CHECKSUM = 128
    WRONG_UPLOADING_FILE_SIZE = 129
    CORRECTING_REPLICATION_INFO = 130
    CORRECTING_REPLICATION_CLASSIFIER = 131
    CORRECTING_REPLICATION_LIST_OBJECT = 132
    GET_CORRECTING_REPLICATION_PACKAGE = 133
    CORRECTING_REPLICATION_SIZE_LIMIT = 134
    PACKAGE_INFO = 135
    CORRECTING_REPLICATION_SIZE_ARCHIVE = 136
    SET_CORRECTING_REPLICATION_PACKAGE = 137
    CHECK_DUPLICATE = 138
    CHECK_TAG = 139
    DELETED_DUPLICATE = 140
    STREAMING_REPLICATION_STARTED = 141
    ERROR_PACKAGE_NUM = 142
    STREAMING_REPLICATION_FINISHED = 143
    DELETE_VERSION_STARTED = 144
    DELETE_VERSION_FINISHED = 145
    UPLOAD_OBJECTS_EXISTS_DIFF_CHECKSUM = 146
    UPLOAD_OBJECTS_WITH_LESS_VERSION = 147
    CORRECTING_REPLICATION_OBJECT_COUNTS = 148
    PACKAGE_INFO_FAILED_OBJECTS = 149
    PACKAGE_INFO_INCOMING_OBJECTS = 150
    OBJECT_INFO_DIDNT_ADD = 151
    TAG_NOT_FOUND = 200
    MISSING_ATTRIBUTE = 201
    DUPLICATE_TAG_ID = 202
    CANT_HIDE_TAG = 203
    CANT_MOVE_TAG = 204
    CANT_HIDE_REGION = 205
    CANT_HIDE_REGIONS = 206
    CANT_HIDE_CHILD_REGION = 207
    CANT_HIDE_CHILD_REGIONS = 208
    WRONG_MID = 300
    MAUNFACTURER_KEY_NOT_FOUND = 301
    OBJECT_NOT_FOUND = 302
    OBJECT_REQUESTED_FROM_PARENT_SERVER = 303
    OBJECT_HAS_NO_FILES = 304
    WRONG_EXPIRATION_PERIOD = 400
    OBJECT_LIST_HAS_DUBLICATES = 401
    ACTIVE_SUBSCRIPTION_ALREADY_EXISTS = 402
    ACTIVE_SUBSCRIPTION_NOT_EXISTS = 403
    WRONG_TYPE_ALL_CHARTS = 500
    NO_CLASSIFIER_FILE_ON_SERVER = 600
    NO_CLASSIFIER_VERSION_ON_SERVER = 601
    CLASSIFIERS_VERSIONS_DIFFER = 602
    WRONG_OBJECT_PARAMETER_CODE = 603
    ILLEGAL_PARAMETER_VALUE = 604
    ILLEGAL_PARAMETER_TYPE = 605
    ILLEGAL_PARAMETER_RANGE = 606
    NOT_ALL_REQUIRED_TYPES = 607
    ILLEGAL_ATTRIBUTE_VALUE = 608
    NOT_ALLOWED_ITEM_FOR_TYPE = 609
    NO_CHART_CLASS_WITH_CODE = 610
    CHART_CLASS_CONTAINS_NONEXISTED_ELEMENT = 611
    CLASS_GROUP_CONTAINS_NONEXISTED_CLASS = 612
    SAME_CLASSIFIER_VERSION = 614
    NO_CLASSIFIER_VERSION = 615
    UNKNOWN_ATTR_VAL_DATA_TYPE = 616
    NOT_ALL_USING_ITEMS_IN_CLASSIFIER = 617
    NO_REQIRED_CLASS_CODE_IN_CLASSIFIER = 618
    REQUIRED_ADDED_TO_CHART_CLASS = 619
    SET_CLASSIFIER_STARTED_AUDIT = 620
    DATA_VALIDATION_STARTED_AUDIT = 621
    DATA_VALIDATION_FAILED_AUDIT = 622
    DATA_VALIDATION_ENDED_AUDIT = 623
    SAVING_CLASSIFIER_STARTED_AUDIT = 624
    SAVING_CLASSIFIER_FINISHED_AUDIT = 625
    SEND_SET_CLASSIFIER_REQUEST_AUDIT = 626
    GET_SET_CLASSIFIER_REQUEST_AUDIT = 627
    GET_GET_CLASSIFIER_REQUEST_AUDIT = 628
    IKP_USER_DECISION_TYPE = 700
    IKP_FILE_STRUCTURE_CHECKER_TYPE = 701
    IKP_FILE_STRUCTURE_DB_LINKS_CHECKER_TYPE = 702
    IKP_DB_LINKS_FILE_STRUCTURE_CHECKER_TYPE = 703
    IKP_CHECK_SUM_CHECKER_TYPE = 704
    IKP_BACKUP_TO_DB_TYPE = 705
    IKP_RECOVER_FROM_DB_TYPE = 706
    IKP_RECOVER_FROM_BACKUP_SERVER_TYPE = 707
    IKP_RECOVER_FROM_CHILD_SERVER_TYPE = 708
    IKP_DB_STRUCTURE_CHECKER_TYPE = 709
    IKP_DB_STRUCTURE_MISSING_COLUMN = 710
    IKP_DB_STRUCTURE_WRONG_COLUMN_TYPE = 711
    IKP_DB_STRUCTURE_WRONT_MAX_LENGTH = 712
    IKP_DB_STRUCTURE_MISSING_TABLE = 713
    IKP_NOTHING_TO_RECOVER = 714
    IKP_ERROR_NOT_FIXED = 715
    IKP_FULL_BACKUP_TO_DB_TYPE = 716
    EXPORT_TO_FOLDER_START = 717
    EXPORT_TO_FOLDER_FINISHED = 718
    EXPORT_TO_FOLDER_ERROR = 719
    EXPORT_TO_FOLDER_DELETE_FILE = 720
    IMPORT_FROM_FOLDER_START = 721
    IMPORT_FROM_FOLDER_FINISHED = 722
    IMPORT_FROM_FOLDER_ERROR = 723
    IMPORT_FOLDER_NOT_FOUND = 724
    FILE_NOT_FOUND = 725
    IMPORT_FROM_FOLDER_MERGE_ERROR = 726
    FILE_NOT_FOUND_COUNTER = 727
    LAST_CORRECTION_DATE_NEWER_THAN_NEW_CORRECTION = 728
    CREATION_DATE_MISSING = 729
    IMPORT_FROM_FOLDER_DIDNT_START = 730
    EXPORT_TO_FOLDER_DIDNT_START = 731
    IKP_STEP_START = 800
    IKP_STEP_SUCCESS = 801
    IKP_STEP_ERROR = 802
    IKP_NO_CHECK_ERROR_FOUND = 803
    IKP_SEND_USER_DECISION = 804
    IKP_GET_USER_DECISION = 805
    IKP_FILE_STRUCTURE_ERROR = 806
    IKP_FILE_STRUCTURE_DB_LINKS_ERROR = 807
    IKP_CHECK_SUM_ERROR = 808
    IKP_DB_LINKS_FILE_STRUCTURE_ERROR = 809
    IKP_DB_STRUCTURE_TABLE_COLUMN_ERROR = 810
    IKP_DB_STRUCTURE_TABLE_ERROR = 811
    IKP_STEP_STOPPED = 812
    IKP_WRONG_FILE_ERROR = 813
    IKP_TASK_RUN_COLLISION = 814
    IKP_FIX_SXF_METADATA_CHECKSUM_INFO = 815
    IKP_FIX_SXF_METADATA_CHECKSUM_PASSPORT_ERROR = 816
    IKP_FIX_SXF_METADATA_CHECKSUM_FILE_ERROR = 817
    IKP_MD_RECOVERING_STARTED = 818
    IKP_MD_RECOVERING_NO_VERSION = 819
    IKP_MD_RECOVERING_CREATED = 820
    IKP_MD_RECOVERING_UPDATED = 821
    IKP_MD_RECOVERING_SUCCEED = 822
    IKP_MD_RECOVERING_FAILED = 823
    IKP_CHECK_AVAILABLE_DISK_SPACE_TYPE = 825
    IKP_SCRIPT_RUN = 827
    IKP_SCRIPT_END_WORK_SUCCEED = 828
    IKP_SCRIPT_END_WORK_FAILED = 829
    IKP_SCRIPT_ALREADY_STARTED = 831
    IKP_MERGE_SAME_OBJECTS_INFO = 832
    IKP_MERGE_SAME_OBJECTS_SUCCESSES = 833
    IKP_MERGE_SAME_OBJECTS_WARNING = 834
    IKP_MERGE_SAME_OBJECTS_ERROR = 835
    IKP_MERGE_SAME_OBJECTS_MERGE_TYPE_VERSIONS = 836
    IKP_MERGE_SAME_OBJECTS_MERGE_TYPE_DATES = 837
    RSC_CLASSIFIER_UPLOAD_STARTED = 838
    RSC_CLASSIFIER_UPLOAD_ERROR = 839
    RSC_CLASSIFIER_UPLOAD_FINISHED = 843
    IKP_CLEAR_OUT_FOLDER_INFO = 840
    IKP_CLEAR_OUT_FOLDER_SUCCESS_DELETE = 841
    IKP_CLEAR_OUT_FOLDER_ERROR_DELETE = 842
    SET_CHART_METADATA = 844
    ERROR_WHILE_COPING_FILE = 845
    ERROR_ADD_OBJECT = 848
    MISSING_REGIONS_CHARTS = 849
    NO_MISSING_REGIONS = 850
    IKP_CLEAR_DOWNLOAD_CATALOG_WARNING = 855
    IKP_CLEAR_DOWNLOAD_CATALOG_ERROR = 856
    IKP_CLEAR_DOWNLOAD_CATALOG_INFO = 857
    REPLICATION_PACKAGE_NOT_FOUND = 858
    REPLICATION_PACKAGE_WRONG_STATUS = 859
    NOT_UNIQUE_METADATA = 860
    IKP_SCALE_RECOVERING_STARTED = 861
    IKP_SCALE_RECOVERING_FAILED = 862
    IKP_SCALE_RECOVERING_FINISHED = 863
    OBJECTS_REGIONS_CHECKING_STARTED = 865
    OBJECTS_REGIONS_CHECKING_FINISHED = 866
    OBJECTS_REGIONS_CHECKING_FAILED = 867
    BLACK_HOLE_TRANSMITTER_IS_OFF = 868
    LACK_VERSIONS_IN_TIME_PERIOD = 869
    ERROR_STOPPING_IKP = 870
    IKP_STOPPED = 871
    REPLICATION_ERROR_TEST = 872
    BLACK_HOLE_NOT_PRESENT = 873
request_itv/reqs/models.py (Normal file, 23 lines added)
@@ -0,0 +1,23 @@
from datetime import datetime


class ReplicantInfo:
    server_address: str
    filter_string: str
    replicant_id: str
    replicant_user_id: str
    replicate_classifiers: bool


class CorrectingReplicationOutPackage:
    package_id: int
    status: int
    from_date: datetime
    replication_date: datetime
    replication_timeout_date: datetime
    end_date: datetime
    replicant_id: str
    server_address: str
    filter_string: str
    replication_period: int
    replication_timeout: int
request_itv/reqs/request_processor.py (Normal file, 4 lines added)
@@ -0,0 +1,4 @@
class RequestProcessor:
    def __init__(self):
        pass
request_itv/reqs/request_xml_service.py (Normal file, 109 lines added)
@@ -0,0 +1,109 @@
import xml.etree.ElementTree as ET
from request_itv.reqs.messages import Messages


class RequestXmlService:
    REQUEST_NODE_NAME: str = 'request'
    HEADER_NODE_NAME: str = 'header'
    RESULT_NODE_NAME: str = 'result'
    REQUEST_ID_ATTRIBUTE_NAME: str = 'parcel_id'
    RESPONSE_ID_ATTRIBUTE_NAME: str = 'reply_parcel_id'
    SOURCE_REQUEST_ID_ATTRIBUTE_NAME: str = 'source_parcel_id'
    RESULT_CODE_ATTRIBUTE_NAME: str = 'result_code'
    RESULT_MESSAGE_ATTRIBUTE_NAME: str = 'result_message'
    HEADER_XPATH: str = HEADER_NODE_NAME

    def get_request_uuid(self, document: ET.Element) -> str:
        return self.get_header_attribute(document, self.REQUEST_ID_ATTRIBUTE_NAME)

    def get_response_uuid(self, document: ET.Element) -> str:
        return self.get_header_attribute(document, self.RESPONSE_ID_ATTRIBUTE_NAME)

    def get_source_request_uuid(self, document: ET.Element) -> str:
        return self.get_header_attribute(document, self.SOURCE_REQUEST_ID_ATTRIBUTE_NAME)

    def get_header_node(self, document: ET.Element) -> ET.Element:
        return document.find(self.HEADER_XPATH)

    def get_header_attribute(self, document: ET.Element, attribute_name: str) -> str:
        header = self.get_header_node(document)
        if header is None:
            raise Exception(Messages.MISSING_TAG.value)
        result = header.get(attribute_name)
        return result.strip() if result else ''

    def get_request_document(self, request_uuid: str, response_uuid) -> ET.Element:
        document = ET.Element(self.REQUEST_NODE_NAME)
        header = ET.SubElement(document, self.HEADER_NODE_NAME)
        header.set(self.REQUEST_ID_ATTRIBUTE_NAME, request_uuid.strip())
        if response_uuid:
            header.set(self.RESPONSE_ID_ATTRIBUTE_NAME, response_uuid.strip())
        return document

    def get_common_response_xml(self, request_uuid: str, response_uuid: str, result_code: int, result_message: str) -> ET.Element:
        document = self.get_request_document(response_uuid, request_uuid)

        result = ET.SubElement(document, self.RESULT_NODE_NAME)
        result.set(self.RESULT_CODE_ATTRIBUTE_NAME, str(result_code))
        result.set(self.RESULT_MESSAGE_ATTRIBUTE_NAME, result_message)

        return document

    def set_result(self, document: ET.Element, result_code: int, result_message: str):
        result = document.find(self.RESULT_NODE_NAME)
        if result is None:
            result = ET.SubElement(document, self.RESULT_NODE_NAME)
        result.set(self.RESULT_CODE_ATTRIBUTE_NAME, str(result_code))
        result.set(self.RESULT_MESSAGE_ATTRIBUTE_NAME, result_message)

    def set_request_uuid(self, document: ET.Element, request_uuid: str):
        header = self.get_header_node(document)
        header.set(self.REQUEST_ID_ATTRIBUTE_NAME, request_uuid.strip())

    def set_source_request_uuid(self, document: ET.Element, source_request_uuid: str):
        header = self.get_header_node(document)
        header.set(self.SOURCE_REQUEST_ID_ATTRIBUTE_NAME, source_request_uuid.strip())

    def get_common_request_xml(self, request_uuid: str) -> ET.Element:
        return self.get_request_document(request_uuid, None)

    def get_response_xml(self, document: ET.Element, request_uuid, response_uuid: str, result_code: int, result_message: str) -> ET.Element:
        header = document.find(self.HEADER_NODE_NAME)
        header.set(self.REQUEST_ID_ATTRIBUTE_NAME, response_uuid.strip())
        # Element.set() rejects None values, so the reply id is only set when present.
        if request_uuid:
            header.set(self.RESPONSE_ID_ATTRIBUTE_NAME, request_uuid.strip())
        result = document.find(self.RESULT_NODE_NAME)
        result.set(self.RESULT_CODE_ATTRIBUTE_NAME, str(result_code))
        result.set(self.RESULT_MESSAGE_ATTRIBUTE_NAME, result_message)
        return document  # the original omitted the return despite the annotated return type

    def get_element(self, document: ET.Element, node: ET.Element, name: str) -> ET.Element:
        for child in node:
            if child.tag.casefold() == name.casefold():
                return child
        return ET.SubElement(document, name)

    def validate_common_request_xml(self, xml: str):
        if not xml:
            raise Exception(Messages.NO_REQUEST_DATA_PROVIDED.name)
        try:
            # fromstring() parses the XML payload itself; ET.parse() expects a filename.
            document = ET.fromstring(xml)
        except ET.ParseError:
            raise Exception(Messages.INVALID_REQUEST_DATA)
        self.validate_common_request_document(document)

    # Renamed from validate_common_request_xml: the original second definition
    # silently shadowed the string-based variant under the same name.
    def validate_common_request_document(self, document):
        if not document:
            raise Exception(Messages.NO_REQUEST_DATA_PROVIDED.name)
        if document.tag != self.REQUEST_NODE_NAME:
            raise Exception(Messages.MISSING_TAG.name, self.REQUEST_NODE_NAME)
        try:
            request_uuid = self.get_header_attribute(document, self.REQUEST_ID_ATTRIBUTE_NAME)
        except Exception:
            raise Exception(Messages.INVALID_REQUEST_DATA)
        if not request_uuid:
            raise Exception(Messages.MISSING_ATTRIBUTE, self.REQUEST_ID_ATTRIBUTE_NAME, self.HEADER_NODE_NAME)

    def get_result_code(self, document: ET.Element):
        # find() avoids get_element's side effect of creating the node, and
        # Element.get() already returns None when the attribute is absent.
        result = document.find(self.RESULT_NODE_NAME)
        if result is None:
            return None
        return result.get(self.RESULT_CODE_ATTRIBUTE_NAME)
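In practice the handlers in __main__ use this service to build a reply envelope carrying the incoming parcel_id. A condensed round-trip sketch (the ids are illustrative):

from uuid import uuid4
import xml.etree.ElementTree as ET

rxmls = RequestXmlService()
incoming = '<request><header parcel_id="abc123"/></request>'
req = ET.fromstring(incoming)
req_id = rxmls.get_request_uuid(req)               # 'abc123'
res = rxmls.get_request_document(uuid4().hex, req_id)
rxmls.set_result(res, 0, '')
print(ET.tostring(res, encoding='unicode', xml_declaration=True))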
request_itv/reqs_graphql.py (Normal file, 46 lines added)
@@ -0,0 +1,46 @@
from gql import gql, Client
from gql.transport.aiohttp import AIOHTTPTransport
from .config import Config

transport = AIOHTTPTransport(url=Config.gql_url)
service = Config.gql_schema


def get_classifier():
    client = Client(transport=transport, fetch_schema_from_transport=True, execute_timeout=None)
    query = gql(
        """
        query getClassifier($name: String!) {
            getClassifier(name: $name)
        }
        """
    )
    result = client.execute(query, variable_values={"name": service})
    return result['getClassifier']


def get_catalog():
    client = Client(transport=transport, fetch_schema_from_transport=True, execute_timeout=None)
    query = gql(
        """
        query getCatalog($name: String!) {
            getCatalog(name: $name)
        }
        """
    )
    result = client.execute(query, variable_values={"name": service})
    return result['getCatalog']


def get_object(oid: str):
    client = Client(transport=transport, fetch_schema_from_transport=True, execute_timeout=None)
    query = gql(
        """
        query getObjects($oid: String!, $name: String!) {
            getObject(name: $name, oid: $oid)
        }
        """
    )
    params = {'oid': oid, 'name': service}
    result = client.execute(query, variable_values=params)
    return result['getObject']
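These helpers return raw payloads from the GraphQL endpoint; judging by the callers in __main__, get_object() yields a JSON-encoded string that is decoded with json.loads, while get_catalog() yields the catalog blob written into WF.CLL. For example (the object id is illustrative):

import json

catalog = get_catalog()
obj = json.loads(get_object('00000000-0000-0000-0000-000000000000'))
print(obj.get('properties', {}))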
request_itv/zip.py (Normal file, 23 lines added)
@@ -0,0 +1,23 @@
import os
from tempfile import TemporaryDirectory, mkstemp
from zipfile import ZipFile


class Zip:

    def __init__(self):
        self.dir = TemporaryDirectory()

    @property
    def dirname(self):
        return self.dir.name

    def pack(self):
        # mkstemp replaces the deprecated, race-prone mktemp used originally.
        fd, tmp_zip = mkstemp(suffix='.zip')
        os.close(fd)
        with ZipFile(tmp_zip, 'w') as zip_object:
            for folder_name, sub_folders, file_names in os.walk(self.dir.name):
                for filename in file_names:
                    file_path = os.path.join(folder_name, filename)
                    rel_file_path = os.path.relpath(file_path, self.dir.name)
                    zip_object.write(file_path, rel_file_path)
        return tmp_zip
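Typical use: write files into the temporary directory exposed by dirname, then call pack() to get the path of a zip preserving the directory's relative layout. For example:

import os

z = Zip()
with open(os.path.join(z.dirname, 'WF.CLL'), 'w') as f:
    f.write('catalog payload')
archive_path = z.pack()
print(archive_path)  # path to a .zip containing WF.CLL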
requirements.txt (Normal file, 40 lines added)
@@ -0,0 +1,40 @@
--find-links=deps
aiohttp==3.8.4
aiosignal==1.3.1
annotated-types==0.5.0
anyio==3.7.1
async-timeout==4.0.3
attrs==23.1.0
backoff==2.2.1
boto3==1.28.37
botocore==1.31.40
certifi==2023.7.22
charset-normalizer==3.2.0
click==8.1.7
fastapi==0.103.1
frozenlist==1.4.0
gql==3.5.0b5
graphql-core==3.3.0a3
greenlet==2.0.2
h11==0.14.0
idna==3.4
jmespath==1.0.1
multidict==6.0.4
pika==1.3.2
pika-stubs==0.1.3
psycopg==3.1.10
pydantic==2.3.0
pydantic_core==2.6.3
pygost==5.12
python-dateutil==2.8.2
python-multipart==0.0.6
requests==2.31.0
s3transfer==0.6.2
six==1.16.0
sniffio==1.3.0
SQLAlchemy==2.0.20
starlette==0.27.0
typing_extensions==4.7.1
urllib3==1.26
uvicorn==0.23.2
yarl==1.9.2