Change configs

@@ -7,7 +7,7 @@ readme = "README.md"
 
 [tool.poetry.dependencies]
 python = "^3.8"
-aiohttp = "3.8.6"
+aiohttp = "3.9.5"
 aiosignal = "1.3.1"
 annotated-types = "0.5.0"
 anyio = "3.7.1"
@@ -46,6 +46,7 @@ urllib3 = "1.26.18"
 uvicorn = "0.20.0"
 yarl = "1.9.2"
 deb_structurer = "*"
+bestconfig = "1.3.6"
 
 
 [build-system]

@@ -1,17 +1,17 @@
 --find-links=deps
-aiohttp==3.8.4
+aiohttp==3.9.5
 aiosignal==1.3.1
 annotated-types==0.5.0
 anyio==3.7.1
 async-timeout==4.0.3
 attrs==23.1.0
 backoff==2.2.1
-boto3==1.28.37
-botocore==1.31.40
+boto3==1.34.122
+botocore==1.34.122
 certifi==2023.7.22
 charset-normalizer==3.2.0
 click==8.1.7
-fastapi==0.103.1
+fastapi==0.111.0
 frozenlist==1.4.0
 gql==3.5.0b5
 graphql-core==3.3.0a3
@@ -27,14 +27,14 @@ pydantic==2.3.0
 pydantic_core==2.6.3
 pygost==5.12
 python-dateutil==2.8.2
-python-multipart==0.0.6
 requests==2.31.0
-s3transfer==0.6.2
+s3transfer==0.10.1
 six==1.16.0
 sniffio==1.3.0
 SQLAlchemy==2.0.20
-starlette==0.27.0
-typing_extensions==4.7.1
-urllib3==1.26
+starlette==0.37.2
+typing_extensions==4.12.2
+urllib3==2.2.1
 uvicorn==0.23.2
 yarl==1.9.2
+bestconfig==1.3.6

@@ -20,7 +20,7 @@ from pygost import gost34112012256
 import xml.etree.ElementTree as ET
 from .reqs.request_xml_service import RequestXmlService
 import zipfile
-from .config import Config
+from .config import config
 from .zip import Zip
 import boto3
 import xmlrpcserver.db as db
@@ -50,9 +50,9 @@ channel_receive: Optional[Any] = None
 
 
 def s3_connection():
-    return boto3.client('s3', endpoint_url=Config.s3_endpoint,
-                        aws_access_key_id=Config.s3_key_id,
-                        aws_secret_access_key=Config.s3_access_key)
+    return boto3.client('s3', endpoint_url=config.default.XMLRPCSERVER.s3.endpoint,
+                        aws_access_key_id=config.default.XMLRPCSERVER.s3.key_id,
+                        aws_secret_access_key=config.default.XMLRPCSERVER.s3.access_key)
 
 
 def download_file(key: str, bucket: str, filename: str):
@@ -81,7 +81,7 @@ def pika_callback(ch, method, properties, body):
             file['url'] = fn
             files.append(file)
         proxy = ServerProxy(url)
-        proxy.send(params, files, Config.ret_path)
+        proxy.send(params, files, config.default.XMLRPCSERVER.ret_path)
     finally:
         ch.basic_ack(delivery_tag=method.delivery_tag)
 
@@ -90,7 +90,8 @@ def pika_task():
     global connection
     global channel_receive
     channel_receive = connection.channel()
-    channel_receive.basic_consume(queue=Config.rabbit_incoming_queue, on_message_callback=pika_callback)
+    channel_receive.basic_consume(queue=config.default.XMLRPCSERVER.amqp.incoming_queue,
+                                  on_message_callback=pika_callback)
     channel_receive.start_consuming()
 
 
@@ -128,9 +129,11 @@ def put_object(params, files, url):
     req = ET.fromstring(params['query_data'])
     obj = req.find('chart')
     class_id = obj.get('Class')
-    con = OOConnectionParams(Config.oodb_schema, Config.oodb_host, Config.oodb_port, Config.oodb_dbname,
-                             Config.oodb_username, Config.oodb_passwd, Config.oodb_schema)
-    ws = OODBWorkspace.ws(Config.oodb_schema)
+    con = OOConnectionParams(config.default.XMLRPCSERVER.oodb.schema, config.default.XMLRPCSERVER.oodb.host,
+                             config.default.XMLRPCSERVER.oodb.port, config.default.XMLRPCSERVER.oodb.dbname,
+                             config.default.XMLRPCSERVER.oodb.username, config.default.XMLRPCSERVER.oodb.passwd,
+                             config.default.XMLRPCSERVER.oodb.schema)
+    ws = OODBWorkspace.ws(config.default.XMLRPCSERVER.oodb.schema)
     if not ws.isInit():
         res = ws.init(con)
         logger.warning(res)
@@ -158,9 +161,9 @@ def put_object(params, files, url):
         key = uuid4().hex
         fileVal.fileName = variantToString(item.relative_to(dir.name))
         fileVal.key = variantToString(key)
-        fileVal.bucket = variantToString(Config.s3_bucket_itv)
+        fileVal.bucket = variantToString(config.default.XMLRPCSERVER.s3.bucket_itv)
         res &= feature.addAttribute('c1000', variantFromFileValue(fileVal))
-        upload_file(str(item), key, Config.s3_bucket_itv)
+        upload_file(str(item), key, config.default.XMLRPCSERVER.s3.bucket_itv)
 
     ws.transaction()
     res = ws.save()
@@ -173,15 +176,15 @@ def accept(params, files, url):
     files_s3 = []
     for file in params['files']:
         fn = uuid4().hex
-        upload_file(file['url'], fn, Config.s3_bucket)
-        file['url'] = {'name': fn, 'bucket': Config.s3_bucket}
+        upload_file(file['url'], fn, config.default.XMLRPCSERVER.s3.bucket)
+        file['url'] = {'name': fn, 'bucket': config.default.XMLRPCSERVER.s3.bucket}
         files_s3.append(file)
     data = {
         'params': params,
         'files': files_s3,
         'url': url,
     }
-    channel_send.basic_publish(exchange=Config.rabbit_send_exchange, body=json.dumps(data))
+    channel_send.basic_publish(exchange=config.default.XMLRPCSERVER.amqp.send_exchange, body=json.dumps(data))
     logger.warning('Accept: ' + json.dumps(params, ensure_ascii=False))
     return True
 
@@ -198,7 +201,7 @@ def onDelivered(params, files, callback_url):
 def bnd_connected(bnd_name: str):
     global channel_send
     logger.warning(f'{bnd_name} connected')
-    channel_send.basic_publish(exchange=Config.rabbit_status_queue, body=json.dumps({
+    channel_send.basic_publish(exchange=config.default.XMLRPCSERVER.amqp.status_queue, body=json.dumps({
         'bnd_name': bnd_name,
         'status': 'connected'
     }))
@@ -208,7 +211,7 @@ def bnd_connected(bnd_name: str):
 def bnd_disconnected(bnd_name: str):
     global channel_send
     logger.warning(f'{bnd_name} disconnected')
-    channel_send.basic_publish(exchange=Config.rabbit_status_queue, body=json.dumps({
+    channel_send.basic_publish(exchange=config.default.XMLRPCSERVER.amqp.status_queue, body=json.dumps({
         'bnd_name': bnd_name,
         'status': 'disconnected'
     }))
@@ -302,16 +305,16 @@ async def correction_replication(bnd_name: str, schema: str):
     res = rxmls.get_request_document(res_id, None)
     ET.SubElement(res, 'currentCommit', {'scheme': schema})
     params = {
-        'from': f'tcp://{Config.self_bnd}',
+        'from': f'tcp://{config.default.XMLRPCSERVER.self_bnd}',
         'to': f'tcp://{bnd_name}',
         'ts_added': date.timestamp(),
         'user_id': '0',
         'query_type': NEW_COMMIT_REQUEST,
         'query_data': ET.tostring(res, encoding='unicode', xml_declaration=True),
     }
-    proxy = ServerProxy(Config.enserver)
+    proxy = ServerProxy(config.default.XMLRPCSERVER.enserver)
     try:
-        proxy.send(params, [], Config.ret_path)
+        proxy.send(params, [], config.default.XMLRPCSERVER.ret_path)
     except:
         logger.error('Error sending')
 
@@ -324,7 +327,7 @@ def main():
     logger.setLevel(logging.INFO)
     logger.warning('Use Control-C to exit')
 
-    connection = pika.BlockingConnection(pika.URLParameters(Config.rabbit_conn))
+    connection = pika.BlockingConnection(pika.URLParameters(config.default.XMLRPCSERVER.amqp.conn))
     channel_send = connection.channel()
 
     xmlrpc_thread = threading.Thread(target=xmlrpc_task)

@@ -1,29 +1,3 @@
-
-class Config:
-    ret_path: str = 'http://10.10.8.81:9000/'
-    self_bnd: str = 'bnd127'
-    enserver: str = 'http://127.0.0.1:7000/xmlrpc'
-
-    pg_host: str = '10.10.8.83'
-    pg_port: int = 32101
-    pg_dbname: str = 'db'
-    pg_username: str = 'postgres'
-    pg_password: str = 'Root12345678'
-
-    oodb_host: str = '10.10.8.83'
-    oodb_port: int = 32100
-    oodb_dbname: str = 'db'
-    oodb_username: str = 'postgres'
-    oodb_passwd: str = 'Root12345678'
-    oodb_schema: str = 'documents_src'
-
-    rabbit_conn: str = 'amqp://user:password@10.10.8.83:31005/%2f'
-    rabbit_send_exchange: str = 'ipd_incoming_itv'
-    rabbit_incoming_queue: str = 'ipd_out_queue'
-    rabbit_status_queue: str = 'ipd_status_queue'
-
-    s3_endpoint: str = 'http://10.10.8.83:31006'
-    s3_key_id: str = 's57'
-    s3_access_key: str = 'd9MMinLF3U8TLSj'
-    s3_bucket_itv: str = 'itv'
-
+from bestconfig import Config
+
+config = Config()

xmlrpcserver/config.yaml (new file, 33 lines added)
@@ -0,0 +1,33 @@
+version: 0.0.1
+default:
+  XMLRPCSERVER:
+    ret_path: 'http://10.10.8.81:9000/'
+    self_bnd: 'bnd127'
+    enserver: 'http://127.0.0.1:7000/xmlrpc'
+
+    db:
+      host: '10.10.8.83'
+      port: 32101
+      dbname: 'db'
+      username: 'postgres'
+      password: 'Root12345678'
+
+    oodb:
+      host: '10.10.8.83'
+      port: 32100
+      dbname: 'db'
+      username: 'postgres'
+      password: 'Root12345678'
+      schema: 'documents_src'
+
+    amqp:
+      conn: 'amqp://user:password@10.10.8.83:31005/%2f'
+      send_exchange: 'ipd_incoming_itv'
+      incoming_queue: 'ipd_out_queue'
+      status_queue: 'ipd_status_queue'
+
+    s3:
+      endpoint: 'http://10.10.8.83:31006'
+      key_id: 's57'
+      access_key: 'd9MMinLF3U8TLSj'
+      bucket_itv: 'itv'

@@ -2,7 +2,7 @@ from datetime import datetime
 from typing import List, Optional
 from sqlalchemy import create_engine, String, select, ForeignKey, Enum
 from sqlalchemy.orm import Session, DeclarativeBase, Mapped, mapped_column, relationship
-from .config import Config
+from .config import config
 
 
 def tow(day: int, hour: int, minute: int):
@@ -162,4 +162,4 @@ class Schemas(Base):
 
 
 def connect_db():
-    return create_engine(f"postgresql+psycopg://{Config.pg_username}:{Config.pg_password}@{Config.pg_host}:{Config.pg_port}/{Config.pg_dbname}")
+    return create_engine(f"postgresql+psycopg://{config.default.XMLRPCSERVER.db.username}:{config.default.XMLRPCSERVER.db.password}@{config.default.XMLRPCSERVER.db.host}:{config.default.XMLRPCSERVER.db.port}/{config.default.XMLRPCSERVER.db.dbname}")

@@ -1,46 +0,0 @@
-from gql import gql, Client
-from gql.transport.aiohttp import AIOHTTPTransport
-from .config import Config
-
-transport = AIOHTTPTransport(url=Config.gql_url)
-service = Config.gql_schema
-
-
-def get_classifier():
-    client = Client(transport=transport, fetch_schema_from_transport=True, execute_timeout=None)
-    query = gql(
-        """
-        query getClassifier($name: String!) {
-            getClassifier(name: $name)
-        }
-        """
-    )
-    result = client.execute(query, variable_values={"name": service}, )
-    return result['getClassifier']
-
-
-def get_catalog():
-    client = Client(transport=transport, fetch_schema_from_transport=True, execute_timeout=None)
-    query = gql(
-        """
-        query getCatalog($name: String!) {
-            getCatalog(name: $name)
-        }
-        """
-    )
-    result = client.execute(query, variable_values={"name": service})
-    return result['getCatalog']
-
-
-def get_object(oid: str):
-    client = Client(transport=transport, fetch_schema_from_transport=True, execute_timeout=None)
-    query = gql(
-        """
-        query getObjects($oid: String!, $name: String!) {
-            getObject(name: $name, oid: $oid)
-        }
-        """
-    )
-    params = {'oid': oid, 'name': service}
-    result = client.execute(query, variable_values=params)
-    return result['getObject']
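
For context, a minimal sketch of how the new configuration is read after this change. It assumes bestconfig's Config() discovers xmlrpcserver/config.yaml on its default search path, which is what the rewritten config.py relies on; the nested attribute access mirrors the call sites in the hunks above.

    from bestconfig import Config

    # Load the configuration; bestconfig locates config.yaml automatically
    # (assumption: the file sits next to config.py, as added in this commit).
    config = Config()

    # Nested YAML keys are reached by attribute access, matching the new call sites,
    # e.g. the S3 endpoint used by s3_connection() and the AMQP URL used in main().
    s3_endpoint = config.default.XMLRPCSERVER.s3.endpoint
    amqp_conn = config.default.XMLRPCSERVER.amqp.conn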