Fix connections

This commit is contained in:
ashatora
2024-05-01 10:20:47 +03:00
parent 0a9db8d70c
commit 795074c7ab
7 changed files with 245 additions and 36 deletions

10
Dockerfile Normal file
View File

@@ -0,0 +1,10 @@
# Runtime image for the query_itv worker, built on the internal oodb base image.
FROM reg.ivazh.ru/infra-oodb
WORKDIR /app
COPY query_itv ./query_itv
COPY requirements.txt ./
# Install Python dependencies and expose the system python3 at the path
# the tnt tooling expects (/opt/tnt/bin/python3).
RUN pip3 install -r requirements.txt && \
    mkdir -p /opt/tnt/bin && \
    ln -s /usr/bin/python3 /opt/tnt/bin/python3
# key=value form: the space-separated `ENV KEY value` syntax is legacy/deprecated.
ENV LD_LIBRARY_PATH="/app"
ENV PYTHONPATH="${PYTHONPATH}:/app"
CMD ["python3", "-m", "query_itv"]

View File

@@ -1,4 +1,5 @@
from tempfile import TemporaryDirectory from json import JSONDecodeError
from tempfile import TemporaryDirectory, gettempdir
from typing import Optional, Any from typing import Optional, Any
import pika import pika
@@ -40,7 +41,8 @@ tasks = Queue()
connected = set() connected = set()
connection: Optional[pika.BlockingConnection] = None connection: Optional[pika.BlockingConnection] = None
channel: Optional[Any] = None channel_send: Optional[Any] = None
channel_receive: Optional[Any] = None
server: Optional[SimpleXMLRPCServer] = None server: Optional[SimpleXMLRPCServer] = None
logger = logging.getLogger('xmlrpcserver') logger = logging.getLogger('xmlrpcserver')
@@ -66,14 +68,46 @@ def upload_file(filename: str, key: str, bucket: str):
client.put_object(Body=f.read(), Bucket=bucket, Key=key) client.put_object(Body=f.read(), Bucket=bucket, Key=key)
def pika_callback(ch, method, properties, body):
    """Handle one message from the incoming RabbitMQ queue.

    The message body is JSON with 'params', 'url' and 'files' entries; each
    file's 'url' holds S3 coordinates ({'name': ..., 'bucket': ...}) that are
    downloaded to a temp path before the task is dispatched via run_task().
    The message is always acked, even on failure, so a poison message cannot
    block the queue.
    """
    try:
        data = json.loads(body)
        params = data['params']
        url = data['url']
        files = []
        for file in data['files']:
            # Download each referenced object into a unique temp file and
            # rewrite 'url' to the local path expected by the task handlers.
            fn = os.path.join(gettempdir(), uuid4().hex)
            download_file(file['url']['name'], file['url']['bucket'], fn)
            file['url'] = fn
            files.append(file)
        run_task(params['query_type'], params, files, url)
    except (JSONDecodeError, KeyError) as e:
        # KeyError added: a well-formed JSON message missing an expected key
        # would otherwise escape the handler and kill the consumer loop.
        # Use the module logger for consistency with the rest of the file.
        logger.warning(e)
    finally:
        ch.basic_ack(delivery_tag=method.delivery_tag)
def send_response(params, files, url):
    """Upload result files to S3 and publish the response on the out exchange.

    Each file dict's 'url' (a local path) is replaced in place with S3
    coordinates ({'name': key, 'bucket': ...}) before the message is sent.
    """
    global channel_send
    uploaded = []
    for entry in files:
        key = uuid4().hex
        upload_file(entry['url'], key, Config.s3_bucket)
        entry['url'] = {'name': key, 'bucket': Config.s3_bucket}
        uploaded.append(entry)
    message = {
        'params': params,
        'files': uploaded,
        'url': url,
    }
    channel_send.basic_publish(exchange=Config.rabbit_out_exchange, body=json.dumps(message), routing_key='')
def pika_task():
    """Open a consuming channel on the shared connection and block forever.

    Note: start_consuming() does not return until the channel is stopped or
    the connection drops, so this call blocks the current thread.
    """
    global connection
    global channel_receive
    channel_receive = connection.channel()
    channel_receive.basic_consume(
        queue=Config.rabbit_incoming_queue,
        on_message_callback=pika_callback,
    )
    channel_receive.start_consuming()
def run_tasks(): def run_tasks():
@@ -160,8 +194,7 @@ def get_data(params, files, url):
filename = fn + '.zip' filename = fn + '.zip'
filepath = os.path.join(os.getcwd(), 'tmp', filename) filepath = os.path.join(os.getcwd(), 'tmp', filename)
response_files = [{'name': filename, 'url': filepath, 'size': os.path.getsize(filepath)}] response_files = [{'name': filename, 'url': filepath, 'size': os.path.getsize(filepath)}]
proxy = ServerProxy(url) send_response(response_params, response_files, url)
proxy.send(response_params, response_files, Config.ret_path)
def receive_data(params, files, url): def receive_data(params, files, url):
@@ -200,29 +233,23 @@ def receive_data(params, files, url):
def run_task(query_type, params, files, url):
    """Route an incoming query to its handler based on query_type."""
    # The two checks are deliberately independent (not elif) to mirror the
    # original dispatch exactly.
    if query_type == NEW_DATA_REQUEST:
        get_data(params, files, url)
    if query_type == NEW_DATA_RESPONSE:
        receive_data(params, files, url)
def main():
    """Entry point: connect to RabbitMQ, set up channels and consume tasks."""
    global connection
    global server
    global channel_send
    connection = pika.BlockingConnection(pika.URLParameters(Config.rabbit_conn))
    channel_send = connection.channel()
    logger.setLevel(logging.INFO)
    logger.warning('Use Control-C to exit')
    try:
        # Blocks in start_consuming() until interrupted or the broker drops us.
        pika_task()
    except KeyboardInterrupt:
        logger.warning('Exiting')
    finally:
        # Fix: the connection was previously never closed, leaking the broker
        # socket (and leaving unacked deliveries in limbo) on shutdown.
        connection.close()
def vers_key(e): def vers_key(e):

View File

@@ -20,19 +20,13 @@ class Config:
rabbit_conn: str = 'amqp://user:password@10.10.8.83:31005/%2f' rabbit_conn: str = 'amqp://user:password@10.10.8.83:31005/%2f'
rabbit_queue: str = 'ipd' rabbit_queue: str = 'ipd'
rabbit_incoming_queue: str = 'ipd_queue_queries'
ws_rabbit_params: dict = { rabbit_out_exchange: str = 'ipd_out_itv'
'host': '10.10.8.83',
'port': 31005,
'exchange': 'ipd',
'user': 'user',
'password': 'password',
}
s3_endpoint: str = 'http://10.10.8.83:31006' s3_endpoint: str = 'http://10.10.8.83:31006'
s3_key_id: str = 's57' s3_key_id: str = 's57'
s3_access_key: str = 'd9MMinLF3U8TLSj' s3_access_key: str = 'd9MMinLF3U8TLSj'
s3_bucket: str = 'files' s3_bucket: str = 'itv'
gql_url: str = 'https://gql.ivazh.ru/graphql' gql_url: str = 'https://gql.ivazh.ru/graphql'
gql_download: str = 'https://gql.ivazh.ru/item/{key}' gql_download: str = 'https://gql.ivazh.ru/item/{key}'

View File

@@ -0,0 +1,12 @@
{
"filestorage": {
"type": "s3",
"endpoint": "http://10.10.8.83:31006",
"key_id": "s57",
"access_key": "d9MMinLF3U8TLSj",
"download_path": "/tmp"
},
"file_code": "c1000",
"name_code": "c122",
"use_version": true
}

View File

@@ -0,0 +1,167 @@
{
"databases": {
"oodb_git": {
"host": "10.10.8.83",
"port": 32100,
"database": "db",
"user": "postgres",
"password": "Root12345678"
}
},
"workspaces": {
"documents_src": {
"type": "documents",
"group": "src",
"database": "oodb_git",
"schema": "documents_src",
"alias": "Документы исходная"
},
"documents_standard": {
"type": "documents",
"group": "order",
"database": "oodb_git",
"schema": "documents_standard",
"alias": "Документы эталон"
},
"documents_standard_pub": {
"type": "documents",
"group": "order",
"database": "oodb_git",
"schema": "documents_standard",
"alias": "Документы публичная"
},
"ood": {
"type": "npd",
"group": "src",
"database": "oodb_git",
"schema": "ood",
"alias": "ООБД исходные НПД",
"map_service": "VUE_APP_GISAIS_URL:/styles/ood/style.json"
},
"oodb": {
"type": "oodb",
"group": "order",
"database": "oodb_git",
"schema": "kartap",
"alias": "ООДБ эталон",
"map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
},
"oodb_standard": {
"type": "oodb",
"group": "forming_standard",
"database": "oodb_git",
"schema": "kartap",
"alias": "ООДБ эталон",
"map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
},
"oodb_working": {
"type": "oodb",
"group": "forming_work",
"database": "oodb_git",
"schema": "kartap",
"alias": "ООДБ рабочая",
"map_service": "VUE_APP_GISAIS_URL:/styles/oodb_tech/style.json"
},
"oodb_pub": {
"type": "oodb",
"group": "order",
"database": "oodb_git",
"schema": "kartap",
"alias": "ООБД публичная",
"map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
},
"regions": {
"type": "regions",
"database": "oodb_git",
"schema": "regions_hard",
"alias": "Регионы",
"map_service": "VUE_APP_GISAIS_URL_GK:/styles/regions/style.json"
},
"regions_contour": {
"type": "regions",
"database": "oodb_git",
"schema": "regions_hard",
"alias": "Регионы",
"map_service": "VUE_APP_GISAIS_URL_GK:/styles/regions_contour/style.json"
},
"npd_9": {
"type": "npd",
"database": "oodb_git",
"schema": "npd_9",
"alias": "НПД 9.0"
},
"npd": {
"type": "npd",
"database": "oodb_git",
"schema": "initial",
"alias": "НПД 9.0"
},
"npd_831": {
"type": "npd",
"group": "order",
"database": "oodb_git",
"schema": "npd_831",
"alias": "НПД 8.31"
},
"updater_test": {
"type": "npd",
"group": "order",
"database": "oodb_git",
"schema": "npd_831_test",
"alias": "НПД 8.31 публичная"
},
"lukoil": {
"type": "oodb",
"database": "oodb_git",
"schema": "lukoil",
"alias": "ЛУКОЙЛ",
"map_service": "VUE_APP_GISAIS_URL_GK:/styles/lukoil/style.json"
},
"geocover": {
"type": "ecpz",
"group": "order",
"database": "oodb_git",
"schema": "coverage",
"alias": "ЕЦПЗ"
},
"geocover_test": {
"type": "ecpz",
"database": "oodb_git",
"schema": "coverage",
"alias": "ЕЦПЗ тест"
},
"gcmr": {
"type": "gcmr",
"group": "order",
"database": "oodb_git",
"schema": "gcmr",
"alias": "ГЦМР"
},
"orders": {
"type": "system",
"database": "oodb_git",
"schema": "orders",
"alias": "Заказы"
},
"ilo": {
"type": "system",
"database": "oodb_git",
"schema": "ilo",
"alias": "ИЛО"
},
"raz_sgok": {
"type": "raz_sgok",
"database": "razsgok",
"schema": "razsgok",
"alias": "СГОК",
"map_service": "VUE_APP_GISAIS_URL_GK:/styles/raz_sgok/style.json"
},
"raz_vtu": {
"type": "raz_vtu",
"database": "razvtu",
"schema": "razvtu",
"alias": "ВТУ",
"map_service": "VUE_APP_GISAIS_URL_GK:/styles/raz_vtu/style.json"
}
}
}

View File

@@ -2,7 +2,7 @@ from datetime import datetime
from typing import List, Optional from typing import List, Optional
from sqlalchemy import create_engine, String, select, ForeignKey, Enum from sqlalchemy import create_engine, String, select, ForeignKey, Enum
from sqlalchemy.orm import Session, DeclarativeBase, Mapped, mapped_column, relationship from sqlalchemy.orm import Session, DeclarativeBase, Mapped, mapped_column, relationship
from config import Config from .config import Config
def tow(day: int, hour: int, minute: int): def tow(day: int, hour: int, minute: int):

View File

@@ -25,7 +25,6 @@ pika-stubs==0.1.3
psycopg==3.1.10 psycopg==3.1.10
pydantic==2.3.0 pydantic==2.3.0
pydantic_core==2.6.3 pydantic_core==2.6.3
pygost==5.12
python-dateutil==2.8.2 python-dateutil==2.8.2
python-multipart==0.0.6 python-multipart==0.0.6
requests==2.31.0 requests==2.31.0